From e9ef43dad6231c41a093438a65a27c7c2d6b6bb8 Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Fri, 4 Oct 2024 14:26:54 +0200 Subject: [PATCH 01/89] Prepare next development version 1.24.0-SNAPSHOT Signed-off-by: Maciej Obuchowski --- client/java/gradle.properties | 2 +- client/python/openlineage/client/constants.py | 2 +- client/python/pyproject.toml | 2 +- client/python/setup.cfg | 2 +- integration/airflow/openlineage/airflow/version.py | 2 +- integration/airflow/setup.cfg | 2 +- integration/airflow/setup.py | 2 +- integration/common/openlineage/common/__init__.py | 2 +- integration/common/setup.cfg | 2 +- integration/common/setup.py | 2 +- integration/dagster/openlineage/dagster/__init__.py | 2 +- integration/dagster/setup.cfg | 2 +- integration/dagster/setup.py | 2 +- integration/dbt/scripts/dbt-ol | 2 +- integration/dbt/setup.cfg | 2 +- integration/dbt/setup.py | 2 +- .../resources/io/openlineage/flink/client/version.properties | 2 +- integration/flink/examples/stateful/gradle.properties | 2 +- integration/flink/gradle.properties | 2 +- .../resources/io/openlineage/flink/client/version.properties | 2 +- integration/spark-extension-interfaces/gradle.properties | 2 +- .../spark/shade/extension/v1/lifecycle/plan/version.properties | 2 +- integration/spark/gradle.properties | 2 +- .../resources/io/openlineage/spark/agent/version.properties | 2 +- .../resources/io/openlineage/spark/agent/version.properties | 2 +- integration/sql/.bumpversion.cfg | 2 +- integration/sql/iface-java/Cargo.toml | 2 +- integration/sql/iface-java/gradle.properties | 2 +- integration/sql/iface-py/Cargo.toml | 2 +- integration/sql/impl/Cargo.toml | 2 +- proxy/backend/gradle.properties | 2 +- 31 files changed, 31 insertions(+), 31 deletions(-) diff --git a/client/java/gradle.properties b/client/java/gradle.properties index 0ec27ec054..ab23c07812 100644 --- a/client/java/gradle.properties +++ b/client/java/gradle.properties @@ -1,3 +1,3 @@ -version=1.23.0 +version=1.24.0-SNAPSHOT org.gradle.caching=true org.gradle.jvmargs=-Xmx4096M \ No newline at end of file diff --git a/client/python/openlineage/client/constants.py b/client/python/openlineage/client/constants.py index a58137de2a..9cd1ab461b 100644 --- a/client/python/openlineage/client/constants.py +++ b/client/python/openlineage/client/constants.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 from __future__ import annotations -__version__ = "1.23.0" +__version__ = "1.24.0" DEFAULT_TIMEOUT_MS = 5000 DEFAULT_NAMESPACE_NAME = "default" diff --git a/client/python/pyproject.toml b/client/python/pyproject.toml index ba91ac0f5a..0a696a004f 100644 --- a/client/python/pyproject.toml +++ b/client/python/pyproject.toml @@ -6,7 +6,7 @@ requires = [ [project] name = "openlineage-python" -version = "1.23.0" +version = "1.24.0" description = "OpenLineage Python Client" readme = "README.md" keywords = [ diff --git a/client/python/setup.cfg b/client/python/setup.cfg index 2c18d3df04..658f7864db 100644 --- a/client/python/setup.cfg +++ b/client/python/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.23.0 +current_version = 1.24.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/airflow/openlineage/airflow/version.py b/integration/airflow/openlineage/airflow/version.py index 1983c3c784..ba93b9775b 100644 --- a/integration/airflow/openlineage/airflow/version.py +++ b/integration/airflow/openlineage/airflow/version.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # 
SPDX-License-Identifier: Apache-2.0 -__version__ = "1.23.0" +__version__ = "1.24.0" diff --git a/integration/airflow/setup.cfg b/integration/airflow/setup.cfg index 4369be8bf4..f7e6f86636 100644 --- a/integration/airflow/setup.cfg +++ b/integration/airflow/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.23.0 +current_version = 1.24.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/airflow/setup.py b/integration/airflow/setup.py index 4c6f0fc900..a66254b619 100644 --- a/integration/airflow/setup.py +++ b/integration/airflow/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.23.0" +__version__ = "1.24.0" requirements = [ "attrs>=20.0", diff --git a/integration/common/openlineage/common/__init__.py b/integration/common/openlineage/common/__init__.py index 1983c3c784..ba93b9775b 100644 --- a/integration/common/openlineage/common/__init__.py +++ b/integration/common/openlineage/common/__init__.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.23.0" +__version__ = "1.24.0" diff --git a/integration/common/setup.cfg b/integration/common/setup.cfg index d76039404d..26079ca734 100644 --- a/integration/common/setup.cfg +++ b/integration/common/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.23.0 +current_version = 1.24.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/common/setup.py b/integration/common/setup.py index fbea437787..4ca12e86b2 100644 --- a/integration/common/setup.py +++ b/integration/common/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.23.0" +__version__ = "1.24.0" project_urls = { diff --git a/integration/dagster/openlineage/dagster/__init__.py b/integration/dagster/openlineage/dagster/__init__.py index 1983c3c784..ba93b9775b 100644 --- a/integration/dagster/openlineage/dagster/__init__.py +++ b/integration/dagster/openlineage/dagster/__init__.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.23.0" +__version__ = "1.24.0" diff --git a/integration/dagster/setup.cfg b/integration/dagster/setup.cfg index afebaef69e..8e6b7c08b8 100644 --- a/integration/dagster/setup.cfg +++ b/integration/dagster/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.23.0 +current_version = 1.24.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/dagster/setup.py b/integration/dagster/setup.py index c42f052d33..4f1248a385 100644 --- a/integration/dagster/setup.py +++ b/integration/dagster/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.23.0" +__version__ = "1.24.0" DAGSTER_VERSION = "1.0.0" diff --git a/integration/dbt/scripts/dbt-ol b/integration/dbt/scripts/dbt-ol index e7a5489068..71ea95e3e6 100755 --- a/integration/dbt/scripts/dbt-ol +++ b/integration/dbt/scripts/dbt-ol @@ -23,7 +23,7 @@ from openlineage.common.provider.dbt import ( from openlineage.common.utils import parse_multiple_args, parse_single_arg from tqdm import tqdm -__version__ = "1.23.0" +__version__ = "1.24.0" PRODUCER = f"https://github.com/OpenLineage/OpenLineage/tree/{__version__}/integration/dbt" diff --git a/integration/dbt/setup.cfg b/integration/dbt/setup.cfg index 6da9b9ea9c..38e98d41a2 100644 --- 
a/integration/dbt/setup.cfg +++ b/integration/dbt/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.23.0 +current_version = 1.24.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/dbt/setup.py b/integration/dbt/setup.py index 463ae6fed0..02ff2d23e0 100644 --- a/integration/dbt/setup.py +++ b/integration/dbt/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.23.0" +__version__ = "1.24.0" requirements = [ "tqdm>=4.62.0", diff --git a/integration/flink/app/src/test/resources/io/openlineage/flink/client/version.properties b/integration/flink/app/src/test/resources/io/openlineage/flink/client/version.properties index bb5c4f2c34..d91a6c7e15 100644 --- a/integration/flink/app/src/test/resources/io/openlineage/flink/client/version.properties +++ b/integration/flink/app/src/test/resources/io/openlineage/flink/client/version.properties @@ -1 +1 @@ -version 1.23.0-SNAPSHOT +version 1.24.0-SNAPSHOT diff --git a/integration/flink/examples/stateful/gradle.properties b/integration/flink/examples/stateful/gradle.properties index 62f24268aa..a93f661875 100644 --- a/integration/flink/examples/stateful/gradle.properties +++ b/integration/flink/examples/stateful/gradle.properties @@ -1 +1 @@ -version=1.23.0 +version=1.24.0-SNAPSHOT diff --git a/integration/flink/gradle.properties b/integration/flink/gradle.properties index ebe5c71018..f063b71cf3 100644 --- a/integration/flink/gradle.properties +++ b/integration/flink/gradle.properties @@ -1,5 +1,5 @@ jdk8.build=true -version=1.23.0 +version=1.24.0-SNAPSHOT flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G diff --git a/integration/flink/shared/src/test/resources/io/openlineage/flink/client/version.properties b/integration/flink/shared/src/test/resources/io/openlineage/flink/client/version.properties index bb5c4f2c34..d91a6c7e15 100644 --- a/integration/flink/shared/src/test/resources/io/openlineage/flink/client/version.properties +++ b/integration/flink/shared/src/test/resources/io/openlineage/flink/client/version.properties @@ -1 +1 @@ -version 1.23.0-SNAPSHOT +version 1.24.0-SNAPSHOT diff --git a/integration/spark-extension-interfaces/gradle.properties b/integration/spark-extension-interfaces/gradle.properties index 9f6a634534..6c90bf4b76 100644 --- a/integration/spark-extension-interfaces/gradle.properties +++ b/integration/spark-extension-interfaces/gradle.properties @@ -1 +1 @@ -version=1.23.0 \ No newline at end of file +version=1.24.0-SNAPSHOT \ No newline at end of file diff --git a/integration/spark-extension-interfaces/src/test/resources/io/openlineage/spark/shade/extension/v1/lifecycle/plan/version.properties b/integration/spark-extension-interfaces/src/test/resources/io/openlineage/spark/shade/extension/v1/lifecycle/plan/version.properties index bb5c4f2c34..d91a6c7e15 100644 --- a/integration/spark-extension-interfaces/src/test/resources/io/openlineage/spark/shade/extension/v1/lifecycle/plan/version.properties +++ b/integration/spark-extension-interfaces/src/test/resources/io/openlineage/spark/shade/extension/v1/lifecycle/plan/version.properties @@ -1 +1 @@ -version 1.23.0-SNAPSHOT +version 1.24.0-SNAPSHOT diff --git a/integration/spark/gradle.properties b/integration/spark/gradle.properties index fe746ffae4..68b7e0a957 100644 --- a/integration/spark/gradle.properties +++ b/integration/spark/gradle.properties @@ -1,4 +1,4 @@ -version=1.23.0 +version=1.24.0-SNAPSHOT org.gradle.jvmargs=-Xmx4G spark.version=3.3.4 diff --git 
a/integration/spark/spark2/src/test/resources/io/openlineage/spark/agent/version.properties b/integration/spark/spark2/src/test/resources/io/openlineage/spark/agent/version.properties index bb5c4f2c34..d91a6c7e15 100644 --- a/integration/spark/spark2/src/test/resources/io/openlineage/spark/agent/version.properties +++ b/integration/spark/spark2/src/test/resources/io/openlineage/spark/agent/version.properties @@ -1 +1 @@ -version 1.23.0-SNAPSHOT +version 1.24.0-SNAPSHOT diff --git a/integration/spark/spark3/src/test/resources/io/openlineage/spark/agent/version.properties b/integration/spark/spark3/src/test/resources/io/openlineage/spark/agent/version.properties index bb5c4f2c34..d91a6c7e15 100644 --- a/integration/spark/spark3/src/test/resources/io/openlineage/spark/agent/version.properties +++ b/integration/spark/spark3/src/test/resources/io/openlineage/spark/agent/version.properties @@ -1 +1 @@ -version 1.23.0-SNAPSHOT +version 1.24.0-SNAPSHOT diff --git a/integration/sql/.bumpversion.cfg b/integration/sql/.bumpversion.cfg index 40329d72a0..87a7dae60b 100644 --- a/integration/sql/.bumpversion.cfg +++ b/integration/sql/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.23.0 +current_version = 1.24.0 [bumpversion:file:iface-py/Cargo.toml] search = version = "{current_version}" diff --git a/integration/sql/iface-java/Cargo.toml b/integration/sql/iface-java/Cargo.toml index 3e9ca8e8c6..7a60206e75 100644 --- a/integration/sql/iface-java/Cargo.toml +++ b/integration/sql/iface-java/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql_java" description = "Java interface for the Rust OpenLineage lineage extraction library" -version = "1.23.0" +version = "1.24.0" edition = "2021" authors = ["Piotr Wojtczak ", "Maciej Obuchowski "] keywords = ["sql", "lineage", "openlineage", "java", "jni"] diff --git a/integration/sql/iface-java/gradle.properties b/integration/sql/iface-java/gradle.properties index 62f24268aa..a93f661875 100644 --- a/integration/sql/iface-java/gradle.properties +++ b/integration/sql/iface-java/gradle.properties @@ -1 +1 @@ -version=1.23.0 +version=1.24.0-SNAPSHOT diff --git a/integration/sql/iface-py/Cargo.toml b/integration/sql/iface-py/Cargo.toml index 2b4f28a9e0..bc1b47d37a 100644 --- a/integration/sql/iface-py/Cargo.toml +++ b/integration/sql/iface-py/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql_python" description = "Python interface for the Rust OpenLineage lineage extraction library" -version = "1.23.0" +version = "1.24.0" edition = "2021" authors = ["Maciej Obuchowski "] keywords = ["sql", "lineage", "openlineage", "python"] diff --git a/integration/sql/impl/Cargo.toml b/integration/sql/impl/Cargo.toml index 4e6977556a..cb92237121 100644 --- a/integration/sql/impl/Cargo.toml +++ b/integration/sql/impl/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql" description = "Library extracting source and destination tables from sql statements" -version = "1.23.0" +version = "1.24.0" edition = "2021" authors = ["Maciej Obuchowski ", "Piotr Wojtczak Date: Mon, 7 Oct 2024 08:18:39 +0200 Subject: [PATCH 02/89] Bump the client-java group in /client/java with 8 updates (#3137) Bumps the client-java group in /client/java with 8 updates: | Package | From | To | | --- | --- | --- | | [io.micrometer:micrometer-core](https://github.com/micrometer-metrics/micrometer) | `1.13.4` | `1.13.5` | | [io.micrometer:micrometer-registry-statsd](https://github.com/micrometer-metrics/micrometer) | `1.13.4` | `1.13.5` | | 
[org.junit.jupiter:junit-jupiter](https://github.com/junit-team/junit5) | `5.11.1` | `5.11.2` | | [com.gradleup.shadow](https://github.com/GradleUp/shadow) | `8.3.2` | `8.3.3` | | [io.freefair.lombok](https://github.com/freefair/gradle-plugins) | `8.10` | `8.10.2` | | [com.google.cloud:google-cloud-nio](https://github.com/googleapis/java-storage-nio) | `0.127.23` | `0.127.24` | | software.amazon.awssdk:bom | `2.28.11` | `2.28.16` | | [org.testcontainers:testcontainers-bom](https://github.com/testcontainers/testcontainers-java) | `1.19.3` | `1.20.2` | Updates `io.micrometer:micrometer-core` from 1.13.4 to 1.13.5 - [Release notes](https://github.com/micrometer-metrics/micrometer/releases) - [Commits](https://github.com/micrometer-metrics/micrometer/compare/v1.13.4...v1.13.5) Updates `io.micrometer:micrometer-registry-statsd` from 1.13.4 to 1.13.5 - [Release notes](https://github.com/micrometer-metrics/micrometer/releases) - [Commits](https://github.com/micrometer-metrics/micrometer/compare/v1.13.4...v1.13.5) Updates `io.micrometer:micrometer-registry-statsd` from 1.13.4 to 1.13.5 - [Release notes](https://github.com/micrometer-metrics/micrometer/releases) - [Commits](https://github.com/micrometer-metrics/micrometer/compare/v1.13.4...v1.13.5) Updates `org.junit.jupiter:junit-jupiter` from 5.11.1 to 5.11.2 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.11.1...r5.11.2) Updates `com.gradleup.shadow` from 8.3.2 to 8.3.3 - [Release notes](https://github.com/GradleUp/shadow/releases) - [Commits](https://github.com/GradleUp/shadow/compare/8.3.2...8.3.3) Updates `io.freefair.lombok` from 8.10 to 8.10.2 - [Release notes](https://github.com/freefair/gradle-plugins/releases) - [Commits](https://github.com/freefair/gradle-plugins/compare/8.10...8.10.2) Updates `com.google.cloud:google-cloud-nio` from 0.127.23 to 0.127.24 - [Release notes](https://github.com/googleapis/java-storage-nio/releases) - [Changelog](https://github.com/googleapis/java-storage-nio/blob/main/CHANGELOG.md) - [Commits](https://github.com/googleapis/java-storage-nio/compare/v0.127.23...v0.127.24) Updates `software.amazon.awssdk:bom` from 2.28.11 to 2.28.16 Updates `org.testcontainers:testcontainers-bom` from 1.19.3 to 1.20.2 - [Release notes](https://github.com/testcontainers/testcontainers-java/releases) - [Changelog](https://github.com/testcontainers/testcontainers-java/blob/main/CHANGELOG.md) - [Commits](https://github.com/testcontainers/testcontainers-java/compare/1.19.3...1.20.2) --- updated-dependencies: - dependency-name: io.micrometer:micrometer-core dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: io.micrometer:micrometer-registry-statsd dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: io.micrometer:micrometer-registry-statsd dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: org.junit.jupiter:junit-jupiter dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: com.gradleup.shadow dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: io.freefair.lombok dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: 
com.google.cloud:google-cloud-nio dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: software.amazon.awssdk:bom dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: org.testcontainers:testcontainers-bom dependency-type: direct:production update-type: version-update:semver-minor dependency-group: client-java ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- client/java/build.gradle | 8 ++++---- client/java/transports-dataplex/build.gradle | 4 ++-- client/java/transports-gcs/build.gradle | 6 +++--- client/java/transports-s3/build.gradle | 8 ++++---- 4 files changed, 13 insertions(+), 13 deletions(-) diff --git a/client/java/build.gradle b/client/java/build.gradle index 871a7d0f85..9c44c0dd11 100644 --- a/client/java/build.gradle +++ b/client/java/build.gradle @@ -16,9 +16,9 @@ plugins { // Don't bump above 6.13 - it requires Java 11 https://github.com/diffplug/spotless/blob/main/plugin-gradle/CHANGES.md#changes-12 id 'com.diffplug.spotless' version '6.13.0' id "org.openapi.generator" version "6.6.0" - id "com.gradleup.shadow" version "8.3.2" + id "com.gradleup.shadow" version "8.3.3" id "pmd" - id "io.freefair.lombok" version "8.10" + id "io.freefair.lombok" version "8.10.2" id 'io.github.gradle-nexus.publish-plugin' version '2.0.0' } @@ -48,10 +48,10 @@ repositories { ext { assertjVersion = '3.26.3' jacksonVersion = "2.15.3" - junit5Version = '5.11.1' + junit5Version = '5.11.2' lombokVersion = '1.18.34' mockitoVersion = '5.2.0' - micrometerVersion = '1.13.4' + micrometerVersion = '1.13.5' isReleaseVersion = !version.endsWith('SNAPSHOT') guavaVersion = '33.3.1-jre' } diff --git a/client/java/transports-dataplex/build.gradle b/client/java/transports-dataplex/build.gradle index cab98ea11d..1d96ada927 100644 --- a/client/java/transports-dataplex/build.gradle +++ b/client/java/transports-dataplex/build.gradle @@ -13,9 +13,9 @@ plugins { id "com.adarshr.test-logger" version "3.2.0" // Don't bump above 6.13 - it requires Java 11 https://github.com/diffplug/spotless/blob/main/plugin-gradle/CHANGES.md#changes-12 id 'com.diffplug.spotless' version '6.13.0' - id "com.gradleup.shadow" version "8.3.2" + id "com.gradleup.shadow" version "8.3.3" id "pmd" - id "io.freefair.lombok" version "8.10" + id "io.freefair.lombok" version "8.10.2" } ext { diff --git a/client/java/transports-gcs/build.gradle b/client/java/transports-gcs/build.gradle index 873f56b0cf..78e3db7470 100644 --- a/client/java/transports-gcs/build.gradle +++ b/client/java/transports-gcs/build.gradle @@ -13,9 +13,9 @@ plugins { id "com.adarshr.test-logger" version "3.2.0" // Don't bump above 6.13 - it requires Java 11 https://github.com/diffplug/spotless/blob/main/plugin-gradle/CHANGES.md#changes-12 id 'com.diffplug.spotless' version '6.13.0' - id "com.gradleup.shadow" version "8.3.2" + id "com.gradleup.shadow" version "8.3.3" id "pmd" - id "io.freefair.lombok" version "8.10" + id "io.freefair.lombok" version "8.10.2" } ext { @@ -25,7 +25,7 @@ ext { dependencies { implementation('com.google.cloud:google-cloud-storage:2.43.1') - testImplementation('com.google.cloud:google-cloud-nio:0.127.23') + testImplementation('com.google.cloud:google-cloud-nio:0.127.24') } shadowJar { diff --git a/client/java/transports-s3/build.gradle b/client/java/transports-s3/build.gradle index 4927d56391..c64f24fb50 100644 --- 
a/client/java/transports-s3/build.gradle +++ b/client/java/transports-s3/build.gradle @@ -13,16 +13,16 @@ plugins { id "com.adarshr.test-logger" version "3.2.0" // Don't bump above 6.13 - it requires Java 11 https://github.com/diffplug/spotless/blob/main/plugin-gradle/CHANGES.md#changes-12 id 'com.diffplug.spotless' version '6.13.0' - id "com.gradleup.shadow" version "8.3.2" + id "com.gradleup.shadow" version "8.3.3" id "pmd" - id "io.freefair.lombok" version "8.10" + id "io.freefair.lombok" version "8.10.2" id "com.github.gmazzo.buildconfig" version "5.5.0" } ext { projectDescription = "S3 OpenLineage transport library" s3MockVersion = "3.11.0" - testcontainersVersion = "1.19.3" + testcontainersVersion = "1.20.2" } sourceSets { @@ -37,7 +37,7 @@ sourceSets { dependencies { compileOnly("com.google.code.findbugs:jsr305:3.0.2") - implementation(platform("software.amazon.awssdk:bom:2.28.11")) + implementation(platform("software.amazon.awssdk:bom:2.28.16")) implementation("software.amazon.awssdk:auth") implementation("software.amazon.awssdk:s3") implementation("software.amazon.awssdk:url-connection-client") From 8ae35c8b13bf3dec6e80a84e205221bff5fb5cc9 Mon Sep 17 00:00:00 2001 From: Julien Phalip Date: Mon, 7 Oct 2024 02:34:04 -0500 Subject: [PATCH 03/89] Require Java 17 only for Spark 4.0 (#3121) Signed-off-by: Julien Phalip --- .../io/openlineage/gradle/plugin/CommonConfigPlugin.kt | 4 ---- integration/spark/spark40/build.gradle | 6 ++++++ 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/integration/spark/buildSrc/src/main/kotlin/io/openlineage/gradle/plugin/CommonConfigPlugin.kt b/integration/spark/buildSrc/src/main/kotlin/io/openlineage/gradle/plugin/CommonConfigPlugin.kt index feb8e66f1a..2f6672046f 100644 --- a/integration/spark/buildSrc/src/main/kotlin/io/openlineage/gradle/plugin/CommonConfigPlugin.kt +++ b/integration/spark/buildSrc/src/main/kotlin/io/openlineage/gradle/plugin/CommonConfigPlugin.kt @@ -87,10 +87,6 @@ class CommonConfigPlugin : Plugin { // never run compile on CI without property being set throw RuntimeException("java.compile.home should be always set on CI env") } - - if (!target.hasProperty("java.compile.home") && JavaVersion.current() < JavaVersion.VERSION_17) { - throw RuntimeException("This project will not compile with Java version below 17.") - } } } diff --git a/integration/spark/spark40/build.gradle b/integration/spark/spark40/build.gradle index ff7a84bdd9..ad63cc45c7 100644 --- a/integration/spark/spark40/build.gradle +++ b/integration/spark/spark40/build.gradle @@ -13,6 +13,12 @@ scalaVariants { create("2.13") } +java { + toolchain { + languageVersion = JavaLanguageVersion.of(17) + } +} + idea { module { testSources.from(sourceSets.testScala213.java.srcDirs) From 31e82464b3ca66ad5886941c86f542f107175b99 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 11:13:31 +0200 Subject: [PATCH 04/89] Bump the integration-spark group in /integration/spark with 7 updates (#3136) Bumps the integration-spark group in /integration/spark with 7 updates: | Package | From | To | | --- | --- | --- | | [io.micrometer:micrometer-core](https://github.com/micrometer-metrics/micrometer) | `1.13.4` | `1.13.5` | | [io.micrometer:micrometer-registry-statsd](https://github.com/micrometer-metrics/micrometer) | `1.13.4` | `1.13.5` | | [org.junit.jupiter:junit-jupiter-api](https://github.com/junit-team/junit5) | `5.11.1` | `5.11.2` | | 
[org.junit.jupiter:junit-jupiter](https://github.com/junit-team/junit5) | `5.11.1` | `5.11.2` | | [org.junit:junit-bom](https://github.com/junit-team/junit5) | `5.11.1` | `5.11.2` | | [org.testcontainers:testcontainers-bom](https://github.com/testcontainers/testcontainers-java) | `1.20.1` | `1.20.2` | | [org.apache.logging.log4j:log4j-bom](https://github.com/apache/logging-log4j2) | `2.24.0` | `2.24.1` | Updates `io.micrometer:micrometer-core` from 1.13.4 to 1.13.5 - [Release notes](https://github.com/micrometer-metrics/micrometer/releases) - [Commits](https://github.com/micrometer-metrics/micrometer/compare/v1.13.4...v1.13.5) Updates `io.micrometer:micrometer-registry-statsd` from 1.13.4 to 1.13.5 - [Release notes](https://github.com/micrometer-metrics/micrometer/releases) - [Commits](https://github.com/micrometer-metrics/micrometer/compare/v1.13.4...v1.13.5) Updates `org.junit.jupiter:junit-jupiter-api` from 5.11.1 to 5.11.2 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.11.1...r5.11.2) Updates `org.junit.jupiter:junit-jupiter` from 5.11.1 to 5.11.2 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.11.1...r5.11.2) Updates `org.junit:junit-bom` from 5.11.1 to 5.11.2 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.11.1...r5.11.2) Updates `org.junit.jupiter:junit-jupiter` from 5.11.1 to 5.11.2 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.11.1...r5.11.2) Updates `org.junit:junit-bom` from 5.11.1 to 5.11.2 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.11.1...r5.11.2) Updates `org.testcontainers:testcontainers-bom` from 1.20.1 to 1.20.2 - [Release notes](https://github.com/testcontainers/testcontainers-java/releases) - [Changelog](https://github.com/testcontainers/testcontainers-java/blob/main/CHANGELOG.md) - [Commits](https://github.com/testcontainers/testcontainers-java/compare/1.20.1...1.20.2) Updates `io.micrometer:micrometer-registry-statsd` from 1.13.4 to 1.13.5 - [Release notes](https://github.com/micrometer-metrics/micrometer/releases) - [Commits](https://github.com/micrometer-metrics/micrometer/compare/v1.13.4...v1.13.5) Updates `org.apache.logging.log4j:log4j-bom` from 2.24.0 to 2.24.1 - [Release notes](https://github.com/apache/logging-log4j2/releases) - [Changelog](https://github.com/apache/logging-log4j2/blob/2.x/RELEASE-NOTES.adoc) - [Commits](https://github.com/apache/logging-log4j2/compare/rel/2.24.0...rel/2.24.1) --- updated-dependencies: - dependency-name: io.micrometer:micrometer-core dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: io.micrometer:micrometer-registry-statsd dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.junit.jupiter:junit-jupiter-api dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.junit.jupiter:junit-jupiter dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.junit:junit-bom dependency-type: direct:production update-type: 
version-update:semver-patch dependency-group: integration-spark - dependency-name: org.junit.jupiter:junit-jupiter dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.junit:junit-bom dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.testcontainers:testcontainers-bom dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: io.micrometer:micrometer-registry-statsd dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.apache.logging.log4j:log4j-bom dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- integration/spark/app/build.gradle | 8 ++++---- integration/spark/shared/build.gradle | 4 ++-- integration/spark/spark2/build.gradle | 4 ++-- integration/spark/spark3/build.gradle | 4 ++-- integration/spark/spark31/build.gradle | 4 ++-- integration/spark/spark32/build.gradle | 4 ++-- integration/spark/spark33/build.gradle | 4 ++-- integration/spark/spark34/build.gradle | 4 ++-- integration/spark/spark35/build.gradle | 4 ++-- integration/spark/spark40/build.gradle | 4 ++-- integration/spark/vendor/snowflake/build.gradle | 4 ++-- 11 files changed, 24 insertions(+), 24 deletions(-) diff --git a/integration/spark/app/build.gradle b/integration/spark/app/build.gradle index 3e2b9276af..12ceb9ec6f 100644 --- a/integration/spark/app/build.gradle +++ b/integration/spark/app/build.gradle @@ -25,16 +25,16 @@ ext { assertjVersion = '3.26.3' bigqueryVersion = '0.35.1' - junit5Version = '5.11.1' + junit5Version = '5.11.2' mockitoVersion = '4.11.0' postgresqlVersion = '42.7.4' - testcontainersVersion = '1.20.1' + testcontainersVersion = '1.20.2' configurableTestConfig = [ sparkConfFile: project.findProperty('spark.conf.file') ?: System.getProperty('spark.conf.file'), hostDir: project.findProperty('host.dir') ?: System.getProperty('host.dir'), testDir: project.findProperty('test.dir') ?: System.getProperty('test.dir') ] - micrometerVersion = '1.13.4' + micrometerVersion = '1.13.5' } // This workaround is needed because the version of Snappy that Spark 2.4.x runs with, @@ -136,7 +136,7 @@ dependencies { testImplementation('net.javacrumbs.json-unit:json-unit-core:2.38.0') testRuntimeOnly("org.slf4j:slf4j-api:2.0.16") - testRuntimeOnly(platform("org.apache.logging.log4j:log4j-bom:2.24.0")) + testRuntimeOnly(platform("org.apache.logging.log4j:log4j-bom:2.24.1")) testRuntimeOnly("org.apache.logging.log4j:log4j-api") testRuntimeOnly("org.apache.logging.log4j:log4j-core") testRuntimeOnly("org.apache.logging.log4j:log4j-slf4j-impl") diff --git a/integration/spark/shared/build.gradle b/integration/spark/shared/build.gradle index e761c1ffa0..3816c599a3 100644 --- a/integration/spark/shared/build.gradle +++ b/integration/spark/shared/build.gradle @@ -29,9 +29,9 @@ ext { awaitilityVersion = "4.2.2" bigqueryVersion = "0.41.0" databricksVersion = "0.1.4" - junit5Version = "5.11.1" + junit5Version = "5.11.2" kafkaClientsVersion = "3.8.0" - micrometerVersion = '1.13.4' + micrometerVersion = '1.13.5' mockitoVersion = "4.11.0" mockserverVersion = "5.14.0" postgresqlVersion = "42.7.4" diff --git a/integration/spark/spark2/build.gradle 
b/integration/spark/spark2/build.gradle index b919b22f78..f76de18900 100644 --- a/integration/spark/spark2/build.gradle +++ b/integration/spark/spark2/build.gradle @@ -25,10 +25,10 @@ ext { deltaVersion = "1.1.0" icebergVersion = "0.14.1" jacksonVersion = "2.15.3" - junit5Version = "5.11.1" + junit5Version = "5.11.2" mockitoVersion = "4.11.0" postgresqlVersion = "42.7.4" - micrometerVersion = '1.13.4' + micrometerVersion = '1.13.5' sparkVersion = project.findProperty("spark2.spark.version") scalaBinaryVersion = "2.11" diff --git a/integration/spark/spark3/build.gradle b/integration/spark/spark3/build.gradle index 70ed3a8386..d5b0052849 100644 --- a/integration/spark/spark3/build.gradle +++ b/integration/spark/spark3/build.gradle @@ -27,10 +27,10 @@ ext { deltaVersion = "1.1.0" icebergVersion = "1.4.3" jacksonVersion = "2.15.3" - junit5Version = "5.11.1" + junit5Version = "5.11.2" lombokVersion = "1.18.30" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.4" + micrometerVersion = "1.13.5" sparkVersion = project.findProperty("spark3.spark.version") scalaBinaryVersion = project.findProperty("scala.binary.version") diff --git a/integration/spark/spark31/build.gradle b/integration/spark/spark31/build.gradle index 6c4941b992..7ca412f9f8 100644 --- a/integration/spark/spark31/build.gradle +++ b/integration/spark/spark31/build.gradle @@ -16,9 +16,9 @@ scalaVariants { ext { assertjVersion = "3.26.3" - junit5Version = "5.11.1" + junit5Version = "5.11.2" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.4" + micrometerVersion = "1.13.5" sparkVersion = project.findProperty("spark31.spark.version") } diff --git a/integration/spark/spark32/build.gradle b/integration/spark/spark32/build.gradle index d38b38837b..d697dc74b4 100644 --- a/integration/spark/spark32/build.gradle +++ b/integration/spark/spark32/build.gradle @@ -25,9 +25,9 @@ ext { deltaVersion = "1.1.0" icebergVersion = "0.14.1" jacksonVersion = "2.15.3" - junit5Version = "5.11.1" + junit5Version = "5.11.2" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.4" + micrometerVersion = "1.13.5" sparkVersion = project.findProperty("spark32.spark.version") scalaBinaryVersion = project.findProperty("scala.binary.version") diff --git a/integration/spark/spark33/build.gradle b/integration/spark/spark33/build.gradle index bcc51acad1..be4367382c 100644 --- a/integration/spark/spark33/build.gradle +++ b/integration/spark/spark33/build.gradle @@ -24,9 +24,9 @@ ext { assertjVersion = "3.26.3" icebergVersion = "0.14.1" jacksonVersion = "2.15.3" - junit5Version = "5.11.1" + junit5Version = "5.11.2" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.4" + micrometerVersion = "1.13.5" sparkVersion = project.findProperty("spark33.spark.version") scalaBinaryVersion = project.findProperty("scala.binary.version") diff --git a/integration/spark/spark34/build.gradle b/integration/spark/spark34/build.gradle index ec13201049..e3c317c44f 100644 --- a/integration/spark/spark34/build.gradle +++ b/integration/spark/spark34/build.gradle @@ -25,9 +25,9 @@ ext { deltaVersion = "2.4.0" icebergVersion = "1.3.1" jacksonVersion = "2.15.3" - junit5Version = "5.11.1" + junit5Version = "5.11.2" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.4" + micrometerVersion = "1.13.5" sparkVersion = project.findProperty("spark34.spark.version") scalaBinaryVersion = project.findProperty("scala.binary.version") diff --git a/integration/spark/spark35/build.gradle b/integration/spark/spark35/build.gradle index 0610d41935..36314f18ba 100644 --- 
a/integration/spark/spark35/build.gradle +++ b/integration/spark/spark35/build.gradle @@ -23,9 +23,9 @@ idea { ext { assertjVersion = "3.26.3" commonsLangVersion = "3.12.0" - junit5Version = "5.11.1" + junit5Version = "5.11.2" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.4" + micrometerVersion = "1.13.5" sparkVersion = project.findProperty("spark35.spark.version") scalaBinaryVersion = project.findProperty("scala.binary.version") diff --git a/integration/spark/spark40/build.gradle b/integration/spark/spark40/build.gradle index ad63cc45c7..53e7af1067 100644 --- a/integration/spark/spark40/build.gradle +++ b/integration/spark/spark40/build.gradle @@ -28,9 +28,9 @@ idea { ext { assertjVersion = "3.26.3" commonsLangVersion = "3.12.0" - junit5Version = "5.11.1" + junit5Version = "5.11.2" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.4" + micrometerVersion = "1.13.5" sparkVersion = project.findProperty("spark40.spark.version") + "-preview1" scalaBinaryVersion = "2.13" diff --git a/integration/spark/vendor/snowflake/build.gradle b/integration/spark/vendor/snowflake/build.gradle index 9f4e600edf..fad94f4849 100644 --- a/integration/spark/vendor/snowflake/build.gradle +++ b/integration/spark/vendor/snowflake/build.gradle @@ -7,11 +7,11 @@ plugins { ext { assertjVersion = '3.26.3' - junit5Version = '5.11.1' + junit5Version = '5.11.2' snowflakeVersion = '2.13.0' mockitoVersion = '4.11.0' lombokVersion = '1.18.30' - micrometerVersion = '1.13.4' + micrometerVersion = '1.13.5' snowflakeLookup = [ '2.4.8': '2.9.3', From 1286b8ca5f08296b531b61e866a8b760de1ede1d Mon Sep 17 00:00:00 2001 From: "pawel.leszczynski" Date: Tue, 8 Oct 2024 12:57:53 +0200 Subject: [PATCH 05/89] avoid tests in configurable test (#3141) Signed-off-by: Pawel Leszczynski --- integration/spark/cli/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration/spark/cli/Dockerfile b/integration/spark/cli/Dockerfile index cc26e70005..93a3e8e1bd 100644 --- a/integration/spark/cli/Dockerfile +++ b/integration/spark/cli/Dockerfile @@ -8,7 +8,7 @@ RUN apk add --update alpine-sdk RUN apk --no-cache add curl cargo build-base bash RUN \ cd /usr/lib/openlineage/client/java && \ - ./gradlew --no-daemon shadowJar publishToMavenLocal && \ + ./gradlew --no-daemon -x test shadowJar publishToMavenLocal && \ cd /usr/lib/openlineage/integration/spark-extension-entrypoint && \ ./gradlew --no-daemon jar publishToMavenLocal && \ cd /usr/lib/openlineage/integration/sql/iface-java && \ From 1482805328cda070e23272e4c005f14920d38578 Mon Sep 17 00:00:00 2001 From: Julien Phalip Date: Tue, 8 Oct 2024 22:58:51 -0700 Subject: [PATCH 06/89] Add extra CLL tests (#3085) * Add extra CLL tests Signed-off-by: Julien Phalip --- .../column/ColumnLevelLineageTestUtils.java | 45 +++++++ ...rmationTypesOnlyFieldDependenciesTest.java | 31 +++-- ...umnLineageWithTransformationTypesTest.java | 112 +++++++++++++++--- 3 files changed, 161 insertions(+), 27 deletions(-) diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/column/ColumnLevelLineageTestUtils.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/column/ColumnLevelLineageTestUtils.java index ec4765fe39..0421126648 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/column/ColumnLevelLineageTestUtils.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/column/ColumnLevelLineageTestUtils.java @@ -131,4 +131,49 @@ static void assertColumnDependsOnInputs( expectedAmountOfInputs, 
facet.getFields().getAdditionalProperties().get(outputColumn).getInputFields().size()); } + + static int countColumnDependencies(OpenLineage.ColumnLineageDatasetFacet facet) { + return countColumnDependencies(facet, null); + } + + static int countColumnDependencies( + OpenLineage.ColumnLineageDatasetFacet facet, String outputColumn) { + int count = 0; + for (String column : facet.getFields().getAdditionalProperties().keySet()) { + if (outputColumn == null || column.equals(outputColumn)) { + List inputFields = + facet.getFields().getAdditionalProperties().get(column).getInputFields(); + for (OpenLineage.InputField inputField : inputFields) { + count += inputField.getTransformations().size(); + } + } + } + return count; + } + + static void assertCountColumnDependencies( + OpenLineage.ColumnLineageDatasetFacet facet, int expected) { + assertEquals(expected, countColumnDependencies(facet)); + } + + static void assertCountColumnDependencies( + OpenLineage.ColumnLineageDatasetFacet facet, String outputColumn, int expected) { + assertEquals(expected, countColumnDependencies(facet, outputColumn)); + } + + static int countDatasetDependencies(OpenLineage.ColumnLineageDatasetFacet facet) { + int count = 0; + List inputFields = facet.getDataset(); + if (inputFields != null) { + for (OpenLineage.InputField inputField : inputFields) { + count += inputField.getTransformations().size(); + } + } + return count; + } + + static void assertCountDatasetDependencies( + OpenLineage.ColumnLineageDatasetFacet facet, int expected) { + assertEquals(expected, countDatasetDependencies(facet)); + } } diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/column/ColumnLineageWithTransformationTypesOnlyFieldDependenciesTest.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/column/ColumnLineageWithTransformationTypesOnlyFieldDependenciesTest.java index 7a85ca6dab..d948df1983 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/column/ColumnLineageWithTransformationTypesOnlyFieldDependenciesTest.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/column/ColumnLineageWithTransformationTypesOnlyFieldDependenciesTest.java @@ -5,8 +5,7 @@ package io.openlineage.spark.agent.column; -import static io.openlineage.spark.agent.column.ColumnLevelLineageTestUtils.assertAllColumnsDependsOnType; -import static io.openlineage.spark.agent.column.ColumnLevelLineageTestUtils.assertColumnDependsOnType; +import static io.openlineage.spark.agent.column.ColumnLevelLineageTestUtils.*; import static io.openlineage.spark.agent.lifecycle.plan.column.TransformationInfo.Subtypes.FILTER; import static io.openlineage.spark.agent.lifecycle.plan.column.TransformationInfo.Subtypes.GROUP_BY; import static io.openlineage.spark.agent.lifecycle.plan.column.TransformationInfo.Subtypes.JOIN; @@ -48,6 +47,7 @@ // TODO #3084: Remove when the column lineage has dataset dependencies flag removed @Slf4j @EnabledIfSystemProperty(named = "spark.version", matches = "([34].*)") +@SuppressWarnings("PMD.JUnitTestContainsTooManyAsserts") class ColumnLineageWithTransformationTypesOnlyFieldDependenciesTest { private static final String FILE = "file"; @@ -120,12 +120,12 @@ void simpleQueryIndirect() { createTable("t1", "a;int", "b;int"); OpenLineage.ColumnLineageDatasetFacet facet = getFacetForQuery(getSchemaFacet("a;int"), "SELECT a FROM t1 WHERE b > 1"); - + assertCountColumnDependencies(facet, 2); assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "a", 
TransformationInfo.identity()); - assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.indirect(FILTER)); + assertCountDatasetDependencies(facet, 0); } @Test @@ -134,10 +134,9 @@ void simpleQueryMultipleIndirect() { OpenLineage.ColumnLineageDatasetFacet facet = getFacetForQuery( getSchemaFacet("a;int"), "SELECT a FROM t1 WHERE b > 1 GROUP BY a, c ORDER BY c"); - + assertCountColumnDependencies(facet, 5); assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); - assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.indirect(GROUP_BY)); assertColumnDependsOnType( @@ -146,6 +145,7 @@ void simpleQueryMultipleIndirect() { facet, "a", FILE, T1_EXPECTED_NAME, "c", TransformationInfo.indirect(GROUP_BY)); assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "c", TransformationInfo.indirect(SORT)); + assertCountDatasetDependencies(facet, 0); } @Test @@ -161,17 +161,28 @@ void simpleQueryMasking() { + "sum(b) as a, " + "sha1(string(sum(b))) as ma " + "FROM t1 GROUP BY a"); - + assertCountColumnDependencies(facet, 10); assertColumnDependsOnType( facet, "i", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); + assertColumnDependsOnType( + facet, "i", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.indirect(GROUP_BY)); assertColumnDependsOnType( facet, "t", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.transformation()); + assertColumnDependsOnType( + facet, "t", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.indirect(GROUP_BY)); assertColumnDependsOnType( facet, "mt", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.transformation(true)); + assertColumnDependsOnType( + facet, "mt", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.indirect(GROUP_BY)); assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.aggregation()); + assertColumnDependsOnType( + facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.indirect(GROUP_BY)); assertColumnDependsOnType( facet, "ma", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.aggregation(true)); + assertColumnDependsOnType( + facet, "ma", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.indirect(GROUP_BY)); + assertCountDatasetDependencies(facet, 0); } @Test @@ -186,7 +197,9 @@ void complexQueryCTEJoinsFilter() { + "tmp2 as (SELECT * FROM t2 where c = 1),\n " + "tmp3 as (SELECT tmp.a, b, c from tmp join tmp2 on tmp.a = tmp2.a)\n " + "SELECT tmp3.a as a, b, c, d FROM tmp3 join t3 on tmp3.a = t3.a order by d"); - + // TODO: There are appears to be bug here. + // Normally this should work: assertCountColumnDependencies(facet, 28); + // However, there appears to be 44 dataset dependencies (i.e. 16 too many). 
assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); assertColumnDependsOnType( @@ -195,7 +208,6 @@ void complexQueryCTEJoinsFilter() { facet, "c", FILE, T2_EXPECTED_NAME, "c", TransformationInfo.identity()); assertColumnDependsOnType( facet, "d", FILE, T3_EXPECTED_NAME, "d", TransformationInfo.identity()); - assertAllColumnsDependsOnType( facet, Arrays.asList("a", "b", "c", "d"), @@ -238,6 +250,7 @@ void complexQueryCTEJoinsFilter() { T3_EXPECTED_NAME, "d", TransformationInfo.indirect(SORT)); + assertCountDatasetDependencies(facet, 0); } @NotNull diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/column/ColumnLineageWithTransformationTypesTest.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/column/ColumnLineageWithTransformationTypesTest.java index 1e857d743c..840064d138 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/column/ColumnLineageWithTransformationTypesTest.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/column/ColumnLineageWithTransformationTypesTest.java @@ -5,8 +5,7 @@ package io.openlineage.spark.agent.column; -import static io.openlineage.spark.agent.column.ColumnLevelLineageTestUtils.assertColumnDependsOnType; -import static io.openlineage.spark.agent.column.ColumnLevelLineageTestUtils.assertDatasetDependsOnType; +import static io.openlineage.spark.agent.column.ColumnLevelLineageTestUtils.*; import static io.openlineage.spark.agent.lifecycle.plan.column.TransformationInfo.Subtypes.CONDITIONAL; import static io.openlineage.spark.agent.lifecycle.plan.column.TransformationInfo.Subtypes.FILTER; import static io.openlineage.spark.agent.lifecycle.plan.column.TransformationInfo.Subtypes.GROUP_BY; @@ -49,6 +48,7 @@ @Slf4j @EnabledIfSystemProperty(named = "spark.version", matches = "([34].*)") +@SuppressWarnings("PMD.JUnitTestContainsTooManyAsserts") class ColumnLineageWithTransformationTypesTest { private static final String FILE = "file"; @@ -120,9 +120,10 @@ void simpleQueryOnlyIdentity() { createTable("t1", "a;int"); OpenLineage.ColumnLineageDatasetFacet facet = getFacetForQuery(getSchemaFacet("a;int"), "SELECT a FROM t1"); - + assertCountColumnDependencies(facet, 1); assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); + assertCountDatasetDependencies(facet, 0); } @Test @@ -131,13 +132,14 @@ void simpleQueryOnlyTransform() { OpenLineage.ColumnLineageDatasetFacet facet = getFacetForQuery( getSchemaFacet("a;int", "b;int"), "SELECT concat(a, 'test') AS a, a+b as b FROM t1"); - + assertCountColumnDependencies(facet, 3); assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.transformation()); assertColumnDependsOnType( facet, "b", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.transformation()); assertColumnDependsOnType( facet, "b", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.transformation()); + assertCountDatasetDependencies(facet, 0); } @Test @@ -145,8 +147,10 @@ void simpleQueryOnlyAggregation() { createTable("t1", "a;int"); OpenLineage.ColumnLineageDatasetFacet facet = getFacetForQuery(getSchemaFacet("a;int"), "SELECT count(a) AS a FROM t1"); + assertCountColumnDependencies(facet, 1); assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.aggregation(true)); + assertCountDatasetDependencies(facet, 0); } @Test @@ -154,10 +158,10 @@ void simpleQueryIndirect() { createTable("t1", "a;int", "b;int"); 
OpenLineage.ColumnLineageDatasetFacet facet = getFacetForQuery(getSchemaFacet("a;int"), "SELECT a FROM t1 WHERE b > 1"); - + assertCountColumnDependencies(facet, 1); assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); - + assertCountDatasetDependencies(facet, 1); assertDatasetDependsOnType( facet, FILE, T1_EXPECTED_NAME, "b", TransformationInfo.indirect(FILTER)); } @@ -167,11 +171,14 @@ void simpleQueryMultipleIndirect() { createTable("t1", "a;int", "b;int", "c;int"); OpenLineage.ColumnLineageDatasetFacet facet = getFacetForQuery( - getSchemaFacet("a;int"), "SELECT a FROM t1 WHERE b > 1 GROUP BY a, c ORDER BY c"); - + getSchemaFacet("a;int", "c;int"), + "SELECT a, c FROM t1 WHERE b > 1 GROUP BY a, c ORDER BY c"); + assertCountColumnDependencies(facet, 2); assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); - + assertColumnDependsOnType( + facet, "c", FILE, T1_EXPECTED_NAME, "c", TransformationInfo.identity()); + assertCountDatasetDependencies(facet, 4); assertDatasetDependsOnType( facet, FILE, T1_EXPECTED_NAME, "a", TransformationInfo.indirect(GROUP_BY)); assertDatasetDependsOnType( @@ -189,7 +196,7 @@ void simpleQueryPriorityDirect() { getFacetForQuery( getSchemaFacet("i;int", "t;int", "a;int", "ta;int", "tat;int"), "SELECT a as i, a + 1 as t, sum(b) as a, 2 * sum(b) as ta, 2 * sum(b + 3) as tat FROM t1 GROUP BY a"); - + assertCountColumnDependencies(facet, 5); assertColumnDependsOnType( facet, "i", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); assertColumnDependsOnType( @@ -200,6 +207,9 @@ void simpleQueryPriorityDirect() { facet, "ta", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.aggregation()); assertColumnDependsOnType( facet, "tat", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.aggregation()); + assertCountDatasetDependencies(facet, 1); + assertDatasetDependsOnType( + facet, FILE, T1_EXPECTED_NAME, "a", TransformationInfo.indirect(GROUP_BY)); } @Test @@ -215,7 +225,7 @@ void simpleQueryMasking() { + "sum(b) as a, " + "sha1(string(sum(b))) as ma " + "FROM t1 GROUP BY a"); - + assertCountColumnDependencies(facet, 5); assertColumnDependsOnType( facet, "i", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); assertColumnDependsOnType( @@ -226,25 +236,45 @@ void simpleQueryMasking() { facet, "a", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.aggregation()); assertColumnDependsOnType( facet, "ma", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.aggregation(true)); + assertCountDatasetDependencies(facet, 1); + assertDatasetDependsOnType( + facet, FILE, T1_EXPECTED_NAME, "a", TransformationInfo.indirect(GROUP_BY)); } @Test - void simpleQueryWithConditional() { + void simpleQueryWithCaseWhenConditional() { createTable("t1", "a;int", "b;int"); OpenLineage.ColumnLineageDatasetFacet facet = getFacetForQuery( getSchemaFacet("cond;int"), "SELECT CASE WHEN b > 1 THEN a ELSE a + b END AS cond FROM t1"); - + assertCountColumnDependencies(facet, 4); assertColumnDependsOnType( facet, "cond", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); assertColumnDependsOnType( facet, "cond", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.transformation()); assertColumnDependsOnType( facet, "cond", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.transformation()); + assertColumnDependsOnType( + facet, "cond", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.indirect(CONDITIONAL)); + assertCountDatasetDependencies(facet, 0); + } + @Test + void simpleQueryWithIfConditional() { + 
createTable("t1", "a;int", "b;int"); + OpenLineage.ColumnLineageDatasetFacet facet = + getFacetForQuery(getSchemaFacet("cond;int"), "SELECT IF(b > 1, a, a + b) AS cond FROM t1"); + assertCountColumnDependencies(facet, 4); + assertColumnDependsOnType( + facet, "cond", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); + assertColumnDependsOnType( + facet, "cond", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.transformation()); + assertColumnDependsOnType( + facet, "cond", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.transformation()); assertColumnDependsOnType( facet, "cond", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.indirect(CONDITIONAL)); + assertCountDatasetDependencies(facet, 0); } @Test @@ -254,9 +284,10 @@ void simpleQueryExplode() { getFacetForQuery( getSchemaFacet("a;string"), "SELECT a FROM (SELECT explode(split(a, ' ')) AS a FROM t1)"); - + assertCountColumnDependencies(facet, 1); assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.transformation()); + assertCountDatasetDependencies(facet, 0); } @Test @@ -266,13 +297,14 @@ void simpleQueryRank() { getFacetForQuery( getSchemaFacet("a;string", "rank;int"), "SELECT a, RANK() OVER (PARTITION BY b ORDER BY c) as rank FROM t1;"); - + assertCountColumnDependencies(facet, 3); assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); assertColumnDependsOnType( facet, "rank", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.indirect(WINDOW)); assertColumnDependsOnType( facet, "rank", FILE, T1_EXPECTED_NAME, "c", TransformationInfo.indirect(WINDOW)); + assertCountDatasetDependencies(facet, 0); } @Test @@ -282,13 +314,31 @@ void simpleQueryWindowedAggregate() { getFacetForQuery( getSchemaFacet("s;int"), "SELECT sum(a) OVER (PARTITION BY b ORDER BY c) AS s FROM t1;"); - + assertCountColumnDependencies(facet, 3); assertColumnDependsOnType( facet, "s", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.aggregation()); assertColumnDependsOnType( facet, "s", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.indirect(WINDOW)); assertColumnDependsOnType( facet, "s", FILE, T1_EXPECTED_NAME, "c", TransformationInfo.indirect(WINDOW)); + assertCountDatasetDependencies(facet, 0); + } + + @Test + void simpleQueryWindowedTransformation() { + createTable("t1", "a;int", "b;string", "c;int"); + OpenLineage.ColumnLineageDatasetFacet facet = + getFacetForQuery( + getSchemaFacet("l;int"), + "SELECT LAG(a, 3, 0) OVER (PARTITION BY b ORDER BY c) AS l FROM t1;"); + assertCountColumnDependencies(facet, 3); + assertColumnDependsOnType( + facet, "l", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.transformation()); + assertColumnDependsOnType( + facet, "l", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.indirect(WINDOW)); + assertColumnDependsOnType( + facet, "l", FILE, T1_EXPECTED_NAME, "c", TransformationInfo.indirect(WINDOW)); + assertCountDatasetDependencies(facet, 0); } @Test @@ -303,7 +353,7 @@ void complexQueryCTEJoinsFilter() { + "tmp2 as (SELECT * FROM t2 where c = 1),\n " + "tmp3 as (SELECT tmp.a, b, c from tmp join tmp2 on tmp.a = tmp2.a)\n " + "SELECT tmp3.a as a, b, c, d FROM tmp3 join t3 on tmp3.a = t3.a order by d"); - + assertCountColumnDependencies(facet, 4); assertColumnDependsOnType( facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); assertColumnDependsOnType( @@ -312,7 +362,9 @@ void complexQueryCTEJoinsFilter() { facet, "c", FILE, T2_EXPECTED_NAME, "c", TransformationInfo.identity()); assertColumnDependsOnType( facet, "d", FILE, 
T3_EXPECTED_NAME, "d", TransformationInfo.identity()); - + // TODO: There are appears to be bug here. + // Normally this should work: assertCountDatasetDependencies(facet, 6); + // However, there appears to be 10 dataset dependencies (i.e. 4 too many). assertDatasetDependsOnType( facet, FILE, T1_EXPECTED_NAME, "a", TransformationInfo.indirect(JOIN)); assertDatasetDependsOnType( @@ -327,6 +379,30 @@ void complexQueryCTEJoinsFilter() { facet, FILE, T3_EXPECTED_NAME, "d", TransformationInfo.indirect(SORT)); } + @Test + void union() { + createTable("t1", "a;int", "b;string"); + createTable("t2", "a;int", "c;string"); + OpenLineage.ColumnLineageDatasetFacet facet = + getFacetForQuery( + getSchemaFacet("a;int", "b;string"), + "SELECT a, b, 'table1' as source\n" + + "FROM t1\n" + + "UNION ALL\n" + + "SELECT a, c, 'table2' as source\n" + + "FROM t2"); + assertCountColumnDependencies(facet, 4); + assertColumnDependsOnType( + facet, "a", FILE, T1_EXPECTED_NAME, "a", TransformationInfo.identity()); + assertColumnDependsOnType( + facet, "a", FILE, T2_EXPECTED_NAME, "a", TransformationInfo.identity()); + assertColumnDependsOnType( + facet, "b", FILE, T1_EXPECTED_NAME, "b", TransformationInfo.identity()); + assertColumnDependsOnType( + facet, "b", FILE, T2_EXPECTED_NAME, "c", TransformationInfo.identity()); + assertCountDatasetDependencies(facet, 0); + } + @NotNull private OpenLineage.ColumnLineageDatasetFacet getFacetForQuery( OpenLineage.SchemaDatasetFacet schemaFacet, String query) { From 2cbef748772306cd9c55de91f8268c5c30ba4db6 Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Wed, 9 Oct 2024 13:37:24 +0200 Subject: [PATCH 07/89] Update CHANGELOG.md link to PR (#3145) --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 583b1e5e1e..e3f0b903e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,7 +6,7 @@ ## [1.23.0](https://github.com/OpenLineage/OpenLineage/compare/1.22.0...1.23.0) - 2024-10-04 ### Added -* **Java: added CompositeTransport** [`#3039`](https://github.com/OpenLineage/OpenLineage/pull/2944) [@JDarDagran](https://github.com/JDarDagran) +* **Java: added CompositeTransport** [`#3039`](https://github.com/OpenLineage/OpenLineage/pull/3039) [@JDarDagran](https://github.com/JDarDagran) *This allows user to specify multiple targets to which OpenLineage events will be emitted.* * **Spark extension interfaces: support table extended sources** [`#3062`](https://github.com/OpenLineage/OpenLineage/pull/3062) [@Imbruced](https://github.com/Imbruced) *Interfaces are now able to extract lineage from Table interface, not only RelationProvider.* From 03af2c6176a28d30cdb529ea8da27e8f913fa950 Mon Sep 17 00:00:00 2001 From: Pahulpreet Singh <54016648+codelixir@users.noreply.github.com> Date: Fri, 11 Oct 2024 18:36:29 +0530 Subject: [PATCH 08/89] spark: Limit the Seq size in RddPathUtils::extract() to avoid OutOfMemoryError for large jobs (#3148) * Limit the Seq size in RddPathUtils::extract() Signed-off-by: Pahulpreet Singh * Add flag to limit the logs in RddPathUtils::extract() Signed-off-by: Pahulpreet Singh --------- Signed-off-by: Pahulpreet Singh --- .../openlineage/spark/agent/util/RddPathUtils.java | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/RddPathUtils.java b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/RddPathUtils.java index abbc0c9cd0..8a71bcae2b 100644 --- 
a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/RddPathUtils.java +++ b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/RddPathUtils.java @@ -7,6 +7,7 @@ import java.util.Arrays; import java.util.Objects; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Stream; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.reflect.FieldUtils; @@ -115,11 +116,15 @@ public boolean isDefinedAt(Object rdd) { @Override public Stream extract(ParallelCollectionRDD rdd) { + int SEQ_LIMIT = 1000; + AtomicBoolean loggingDone = new AtomicBoolean(false); try { Object data = FieldUtils.readField(rdd, "data", true); log.debug("ParallelCollectionRDD data: {}", data); - if (data instanceof Seq) { - return ScalaConversionUtils.fromSeq((Seq) data).stream() + if ((data instanceof Seq) && ((Seq) data).head() instanceof Tuple2) { + // exit if the first element is invalid + Seq data_slice = (Seq) ((Seq) data).slice(0, SEQ_LIMIT); + return ScalaConversionUtils.fromSeq(data_slice).stream() .map( el -> { Path path = null; @@ -127,8 +132,9 @@ public Stream extract(ParallelCollectionRDD rdd) { // we're able to extract path path = parentOf(((Tuple2) el)._1.toString()); log.debug("Found input {}", path); - } else { + } else if (!loggingDone.get()) { log.warn("unable to extract Path from {}", el.getClass().getCanonicalName()); + loggingDone.set(true); } return path; }) From bf3d4c5939f6d676f49343e349175e9574dd5424 Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Fri, 11 Oct 2024 15:29:42 +0200 Subject: [PATCH 09/89] test: Add integration tests for EMR (#3142) * Spark integration has integration tests for EMR Signed-off-by: Artur Owczarek --- integration/spark/app/build.gradle | 40 +- .../io/openlineage/spark/agent/AwsUtils.java | 117 ++++++ .../spark/agent/DynamicParameter.java | 78 ++++ .../spark/agent/EmrIntegrationTest.java | 150 ++++++++ .../spark/agent/EmrTestEnvironment.java | 346 ++++++++++++++++++ .../openlineage/spark/agent/Templating.java | 47 +++ .../resources/emr_test_jobs/basic_script.py | 31 ++ 7 files changed, 806 insertions(+), 3 deletions(-) create mode 100644 integration/spark/app/src/test/java/io/openlineage/spark/agent/AwsUtils.java create mode 100644 integration/spark/app/src/test/java/io/openlineage/spark/agent/DynamicParameter.java create mode 100644 integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrIntegrationTest.java create mode 100644 integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrTestEnvironment.java create mode 100644 integration/spark/app/src/test/java/io/openlineage/spark/agent/Templating.java create mode 100644 integration/spark/app/src/test/resources/emr_test_jobs/basic_script.py diff --git a/integration/spark/app/build.gradle b/integration/spark/app/build.gradle index 12ceb9ec6f..76508ad1e5 100644 --- a/integration/spark/app/build.gradle +++ b/integration/spark/app/build.gradle @@ -123,6 +123,12 @@ dependencies { exclude group: 'com.fasterxml.jackson.core' exclude group: 'com.fasterxml.jackson.module' } + + testImplementation(platform("software.amazon.awssdk:bom:2.28.11")) + testImplementation("software.amazon.awssdk:auth") + testImplementation("software.amazon.awssdk:emr") + testImplementation("software.amazon.awssdk:s3") + testImplementation("org.mock-server:mockserver-netty:5.14.0:shaded") { exclude group: 'com.google.guava', module: 'guava' exclude group: 'com.fasterxml.jackson.core' @@ -369,6 +375,13 @@ tasks.register("integrationTest", Test.class) { useJUnitPlatform 
{ includeTags("integration-test") excludeTags("configurable-integration-test") + /* + We don't want to run Databricks or AWS integration tests by default. + The "regular" integration tests are run for every pull request. + We want to run Databricks and AWS integration tests on demand and nightly. + See `databricksIntegrationTest` and `awsIntegrationTest` tasks. + */ + excludeTags("aws") excludeTags("databricks") logger.warn("[IntegrationTest] hasDeltaDependencies: ${hasDeltaDependencies(spark, scala)}") if (!hasDeltaDependencies(spark, scala)) { @@ -395,6 +408,27 @@ tasks.register("databricksIntegrationTest", Test) { } } + +tasks.register("awsIntegrationTest", Test) { + group = "verification" + dependsOn(integrationTestDependencies) + testClassesDirs = testSourceSet.output.classesDirs + classpath = files(tasks.shadowJar.outputs.files.singleFile, sourceSets.test.runtimeClasspath) + useJUnitPlatform { + includeTags("aws") + } + options { + /* + The "test" task runs JUnit tests in a separate JVM, and by default, system properties (-D parameters) + are not forwarded to it. Gradle allows forwarding them with -P parameter, but not everyone is aware of that. + Here, we selectively pass only properties that start with "openlineage" to avoid conflicts with + environment-related properties (e.g., current working directory) that could affect resource discovery. + The properties are later used in template resolution. + */ + systemProperties = System.getProperties().findAll { key, value -> key.toString().startsWith("openlineage") } + } +} + tasks.register("configurableIntegrationTest", Test) { group = "verification" dependsOn(integrationTestDependencies) @@ -403,9 +437,9 @@ tasks.register("configurableIntegrationTest", Test) { useJUnitPlatform { includeTags("configurable-integration-test") } - systemProperties.put("spark.conf.file", configurableTestConfig["sparkConfFile"] ) - systemProperties.put("test.dir", configurableTestConfig["testDir"] ) - systemProperties.put("host.dir", configurableTestConfig["hostDir"] ) + systemProperties.put("spark.conf.file", configurableTestConfig["sparkConfFile"]) + systemProperties.put("test.dir", configurableTestConfig["testDir"]) + systemProperties.put("host.dir", configurableTestConfig["hostDir"]) environment("TESTCONTAINERS_HOST_OVERRIDE", "host.docker.internal") } diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/AwsUtils.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/AwsUtils.java new file mode 100644 index 0000000000..c662298704 --- /dev/null +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/AwsUtils.java @@ -0,0 +1,117 @@ +/* +/* Copyright 2018-2024 contributors to the OpenLineage project +/* SPDX-License-Identifier: Apache-2.0 +*/ + +package io.openlineage.spark.agent; + +import io.openlineage.client.OpenLineage; +import io.openlineage.client.OpenLineageClientUtils; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Comparator; +import java.util.List; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import lombok.SneakyThrows; +import lombok.experimental.UtilityClass; +import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.NotNull; +import software.amazon.awssdk.core.sync.RequestBody; +import software.amazon.awssdk.core.sync.ResponseTransformer; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.*; + 
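+/**
+ * S3 helpers used by the EMR integration tests: uploading the OpenLineage and S3-transport jars,
+ * uploading the rendered PySpark scripts, and reading back the run events the jobs emit.
+ */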
+@Slf4j +@UtilityClass +class AwsUtils { + + public static final String OPEN_LINEAGE_JAR_LOCATION = "../build/libs/"; + public static final String S3_TRANSPORT_JAR_LOCATION = + "../../../client/java/transports-s3/build/libs/"; + public static final String SNAPSHOT_JAR_SUFFIX = "-SNAPSHOT.jar"; + + static String s3Url(String bucketName, String key) { + return "s3://" + bucketName + "/" + key; + } + + /** Fetches the newest jar file and uploads it to the specified directory */ + @SneakyThrows + static String uploadOpenLineageJar(S3Client s3Client, String bucket, String prefix) { + Path jarFile = + findNewestFile(OPEN_LINEAGE_JAR_LOCATION, "openlineage-spark_", SNAPSHOT_JAR_SUFFIX) + .orElseThrow(() -> new RuntimeException("openlineage-spark jar not found")); + String uploadedFileKey = uploadFile(s3Client, jarFile, bucket, prefix); + log.info("OpenLineage jar has been uploaded to [{}]", s3Url(bucket, uploadedFileKey)); + return uploadedFileKey; + } + + /** Fetches the newest jar file and uploads it to the specified directory */ + @SneakyThrows + static String uploadS3TransportJar(S3Client s3Client, String bucket, String prefix) { + Path jarFile = + findNewestFile(S3_TRANSPORT_JAR_LOCATION, "transports-s3", "-SNAPSHOT.jar") + .orElseThrow(() -> new RuntimeException("S3 transport jar not found")); + String uploadedFileKey = uploadFile(s3Client, jarFile, bucket, prefix); + log.info("S3 transport jar has been uploaded to [{}]", s3Url(bucket, uploadedFileKey)); + return uploadedFileKey; + } + + private static @NotNull Optional findNewestFile(String first, String prefix, String suffix) + throws IOException { + return Files.list(Paths.get(first)) + .filter(p -> p.getFileName().toString().startsWith(prefix)) + .filter(p -> p.getFileName().toString().endsWith(suffix)) + .max(Comparator.naturalOrder()); + } + + public void deleteFiles(S3Client s3Client, String bucket, String prefix) {} + + static List fetchEventsEmitted( + S3Client s3Client, String bucketName, String location) { + return readAllFilesInPath(s3Client, bucketName, location) + .map(OpenLineageClientUtils::runEventFromJson) + .collect(Collectors.toList()); + } + + private static Stream readAllFilesInPath( + S3Client s3Client, String bucketName, String directoryPath) { + return s3Client + .listObjectsV2( + ListObjectsV2Request.builder().bucket(bucketName).prefix(directoryPath).build()) + .contents() + .stream() + .map(s3Object -> getS3ObjectContent(s3Client, bucketName, s3Object.key())); + } + + private static String getS3ObjectContent(S3Client s3Client, String bucketName, String objectKey) { + return s3Client + .getObject( + GetObjectRequest.builder().bucket(bucketName).key(objectKey).build(), + ResponseTransformer.toBytes()) + .asUtf8String(); + } + + /** + * Uploads the file. If the prefix ends with forward slash, then the name of the file remains + * unchanged. Otherwise, the last part of the prefix is used as the name of the file. + * + * @return The key of the stored file. + */ + static String uploadFile(S3Client s3Client, Path sourceFile, String bucket, String prefix) { + String key = (prefix.endsWith("/") ? 
prefix : (prefix + "/")) + sourceFile.getFileName(); + s3Client.putObject( + PutObjectRequest.builder().bucket(bucket).key(key).build(), + RequestBody.fromFile(sourceFile.toFile())); + return key; + } + + static void uploadFile(S3Client s3Client, String fileContent, String bucket, String key) { + s3Client.putObject( + PutObjectRequest.builder().bucket(bucket).key(key).build(), + RequestBody.fromString(fileContent)); + } +} diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DynamicParameter.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DynamicParameter.java new file mode 100644 index 0000000000..14204d29c1 --- /dev/null +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DynamicParameter.java @@ -0,0 +1,78 @@ +/* +/* Copyright 2018-2024 contributors to the OpenLineage project +/* SPDX-License-Identifier: Apache-2.0 +*/ + +package io.openlineage.spark.agent; + +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; + +/** + * Class for dynamic provisioning of parameters. In the current form it retrieves the values from + * the system properties. They can be set using -Dopenlineage.tests.parameterName=value when running + * the application. + */ +@Slf4j +@Getter +public enum DynamicParameter { + // DEVELOPMENT + /** + * The ID of the EMR cluster if we want to use the existing one instead of creating a new one in + * the tests + */ + ClusterId("clusterId", ""), + PreventS3Cleanup("preventS3Cleanup", "false"), + PreventClusterTermination("preventClusterTermination", "false"), + + // CLUSTER + EmrLabel("emrLabel", "emr-7.2.0"), + EventsKeyPrefix("eventsKeyPrefix", "events"), + Ec2InstanceProfile("ec2InstanceProfile", "EMR_EC2_DefaultRole"), + ServiceRole("serviceRole", "EMR_DefaultRole"), + MasterInstanceType("masterInstanceType", "m4.large"), + SlaveInstanceType("slaveInstanceType", "m4.large"), + + /** The bucket where the tests keep the dependency jars, scripts, produced events, logs, etc */ + BucketName("bucketName"), + /** + * The prefix where the tests will be run. Each test execution will have a separate random + * directory inside. + */ + TestsKeyPrefix("testsKeyPrefix", "emr-integration-tests/test-"); + + private final String templateParameter; + private final String defaultValue; + + DynamicParameter(String templateParameter) { + this(templateParameter, null); + } + + DynamicParameter(String templateParameter, String defaultValue) { + this.templateParameter = templateParameter; + this.defaultValue = defaultValue; + } + + String resolve() { + String key = "openlineage.tests." + getTemplateParameter(); + log.debug("Resolving parameter [{}] using key [{}]", name(), key); + String resolved = System.getProperty(key); + if (resolved != null) { + return resolved; + } else { + if (defaultValue != null) { + log.debug( + "The value for parameter [{}] has not been found. Using the default value [{}]", + key, + defaultValue); + return defaultValue; + } + } + throw new RuntimeException( + "The value [" + + key + + "] could not be found in the system properties. 
Use `-D" + + key + + "=YOUR_VALUE`"); + } +} diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrIntegrationTest.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrIntegrationTest.java new file mode 100644 index 0000000000..377db3814c --- /dev/null +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrIntegrationTest.java @@ -0,0 +1,150 @@ +/* +/* Copyright 2018-2024 contributors to the OpenLineage project +/* SPDX-License-Identifier: Apache-2.0 +*/ + +package io.openlineage.spark.agent; + +import static org.assertj.core.api.Assertions.assertThat; + +import io.openlineage.client.OpenLineage; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.List; +import java.util.Map; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; + +/** + * + * + *

+ * <h2>EMR Integration Tests</h2>
+ *
+ * This suite of integration tests verifies OpenLineage's functionality on AWS EMR. These tests
+ * automatically launch a new EMR cluster, upload the necessary OpenLineage JARs and scripts to S3,
+ * execute those scripts, capture the resulting events, and validate them. After completion, the
+ * cluster is terminated, and any temporary files are deleted.
+ *
+ * <p>The tests are meant to include only the cases which are difficult to catch with alternative
+ * methods like unit tests or Spark integration tests in the container.
+ *
+ * <p>To execute the tests, configure the required parameters using system properties (refer to
+ * {@link DynamicParameter} for more details).
+ *
+ * <h2>PySpark Test Samples</h2>
+ *
+ * <h3>Templates</h3>
+ *
+ * <p>The test scripts use a templating system where parameters are injected via double curly braces
+ * ({@code {{parameterName}}}). We recommend defining parameters as constants at the top of the
+ * script for easier reference throughout the code.
+ *
+ * <h3>Handling Event Processing</h3>
+ *
+ * <p>Since the events are processed by a daemon thread, it's essential to add a brief sleep period
+ * at the end of the script (3 seconds is enough). This ensures that all the Spark events are
+ * processed before the application terminates.
+ *
+ * <h2>Infrastructure Requirements</h2>
+ *
+ * <p>The tests require existing infrastructure to run. Make sure the following resources are
+ * available before you run the tests:
+ *
+ * <ul>
+ *   <li>An S3 bucket for storing test files
+ *   <li>An EC2 instance profile
+ *   <li>An IAM role assigned to the EMR cluster
+ * </ul>
+ *
+ * <h2>Configuration</h2>
+ *
+ * <p>All infrastructure details and configuration parameters should be set using system properties.
+ * For example: {@code -Dopenlineage.tests.bucketName=my-bucket-name}. Most parameters have
+ * defaults. For a full list of configurable parameters, see {@link DynamicParameter}.
+ *
+ * <h2>Note on JUnit and Gradle</h2>
+ *
+ * <p>
JUnit runs tests in a separate JVM. When using Gradle, system properties passed as {@code + * -Dparameter=value} are not automatically available in the test JVMs. Gradle script is configured + * to pass system properties prefixed with {@code openlineage}, so only those can be used safely in + * tests. + */ +@Tag("integration-test") +@Tag("aws") +class EmrIntegrationTest { + + private static final EmrTestEnvironment.EmrTestEnvironmentProperties emrTestParameters; + + static { + // Tests prefix with the date mark to tell when they were run in UTC + String testsPrefix = + DynamicParameter.TestsKeyPrefix.resolve() + + ZonedDateTime.now(ZoneOffset.UTC) + .format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss")) + + "/"; + emrTestParameters = + EmrTestEnvironment.EmrTestEnvironmentProperties.builder() + .development( + EmrTestEnvironment.EmrTestEnvironmentProperties.Development.builder() + // We can connect to the existing EMR cluster to speed up testing + .clusterId(DynamicParameter.ClusterId.resolve()) + .preventS3Cleanup( + Boolean.parseBoolean(DynamicParameter.PreventS3Cleanup.resolve())) + .preventClusterTermination( + Boolean.parseBoolean(DynamicParameter.PreventClusterTermination.resolve())) + .build()) + .cluster( + EmrTestEnvironment.EmrTestEnvironmentProperties.NewCluster.builder() + .emrLabel(DynamicParameter.EmrLabel.resolve()) + .ec2InstanceProfile(DynamicParameter.Ec2InstanceProfile.resolve()) + .serviceRole(DynamicParameter.ServiceRole.resolve()) + .masterInstanceType(DynamicParameter.MasterInstanceType.resolve()) + .slaveInstanceType(DynamicParameter.SlaveInstanceType.resolve()) + .build()) + .bucketName(DynamicParameter.BucketName.resolve()) + .keyPrefix(testsPrefix) + .build(); + } + + private static final EmrTestEnvironment emrTestEnvironment = + new EmrTestEnvironment(emrTestParameters); + + @BeforeAll + public static void setup() {} + + @AfterAll + public static void teardown() { + emrTestEnvironment.s3Cleanup(); + emrTestEnvironment.close(); + } + + @Test + void testBasicScriptHasOutputs() { + List runEvents = + emrTestEnvironment.runScript( + "basic_script.py", + Map.of( + "bucketName", + emrTestParameters.getBucketName(), + "outputPrefix", + emrTestParameters.getKeyPrefix() + "output", + "namespace", + "someNamespace")); + + assertThat(runEvents).isNotEmpty(); + + OpenLineage.RunEvent completeEvent = + runEvents.stream() + .filter(runEvent -> !runEvent.getOutputs().isEmpty()) + .filter(runEvent -> runEvent.getEventType() == OpenLineage.RunEvent.EventType.COMPLETE) + .findFirst() + .get(); + + assertThat("someNamespace").isEqualTo(completeEvent.getJob().getNamespace()); + assertThat("s3://" + emrTestParameters.getBucketName()) + .isEqualTo(completeEvent.getOutputs().get(0).getNamespace()); + } +} diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrTestEnvironment.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrTestEnvironment.java new file mode 100644 index 0000000000..83212e0d70 --- /dev/null +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrTestEnvironment.java @@ -0,0 +1,346 @@ +/* +/* Copyright 2018-2024 contributors to the OpenLineage project +/* SPDX-License-Identifier: Apache-2.0 +*/ + +package io.openlineage.spark.agent; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import io.openlineage.client.OpenLineage; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import javax.annotation.Nullable; +import lombok.Builder; 
+import lombok.Getter; +import lombok.NonNull; +import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.NotNull; +import software.amazon.awssdk.core.internal.waiters.ResponseOrException; +import software.amazon.awssdk.services.emr.EmrClient; +import software.amazon.awssdk.services.emr.model.*; +import software.amazon.awssdk.services.emr.waiters.EmrWaiter; +import software.amazon.awssdk.services.s3.S3Client; + +@Slf4j +public class EmrTestEnvironment implements AutoCloseable { + private final EmrClient client = EmrClient.builder().build(); + private final S3Client s3Client = S3Client.builder().build(); + private final EmrWaiter waiter = client.waiter(); + private final String openLineageJarKey; + private final String s3TransportJarKey; + + /** The value is null when the cluster has not been started. */ + @Nullable private final String clusterId; + + private final EmrTestEnvironmentProperties properties; + + @Builder + @Getter + static class EmrTestEnvironmentProperties { + @NonNull private final Development development; + @NonNull private final NewCluster cluster; + @NonNull private final String bucketName; + // The unique prefix used to run the tests. It is the location where the files with jars, + // scripts, events and logs will be stored + @NonNull private final String keyPrefix; + + /** This class contains settings to facilitate development. */ + @Builder + @Getter + static class Development { + // Optional ID of the cluster if we want to use the existing cluster instead of creating a new + // one. If specified, the properties from NewCluster won't be used. + // Empty String represents no value. + private final String clusterId; + // Optional flag preventing the cleanup process from S3. Useful when the results are different + // from expected, and we want to investigate it + private final boolean preventS3Cleanup; + // Optional flag preventing the cluster shutdown at the end. Useful when you want to create + // the + // cluster + // with test but want to keep it for future tests + private final boolean preventClusterTermination; + // The name of the bucket used for jars, scripts and logs + } + + @Builder + @Getter + static class NewCluster { + @NonNull private final String emrLabel; + @NonNull private final String serviceRole; + @NonNull private final String ec2InstanceProfile; + @NonNull private final String masterInstanceType; + @NonNull private final String slaveInstanceType; + } + + public String getJobsPrefix() { + return keyPrefix + "jobs/"; + } + + public String getJarsPrefix() { + return keyPrefix + "jars/"; + } + + public String getEventsPrefix() { + return keyPrefix + "events/"; + } + + public String getEventsForScriptPrefix(String scriptName) { + return getEventsPrefix() + scriptName + "/"; + } + + public String getLogsPrefix() { + return keyPrefix + "logs/"; + } + + public String getScriptKey(String scriptName) { + return keyPrefix + "scripts/" + scriptName; + } + } + + EmrTestEnvironment(EmrTestEnvironmentProperties properties) { + this.properties = properties; + String bucketName = properties.getBucketName(); + log.info( + "Initiating EMR environment. The jars will be stores under [{}]. The logs will be stored under [{}]. The jobs will be stored under [{}]. 
The events will be stored under [{}]", + AwsUtils.s3Url(bucketName, properties.getJarsPrefix()), + AwsUtils.s3Url(bucketName, properties.getLogsPrefix()), + AwsUtils.s3Url(bucketName, properties.getJobsPrefix()), + AwsUtils.s3Url(bucketName, properties.getEventsPrefix())); + + this.openLineageJarKey = + AwsUtils.uploadOpenLineageJar(s3Client, bucketName, properties.getJarsPrefix()); + this.s3TransportJarKey = + AwsUtils.uploadS3TransportJar(s3Client, bucketName, properties.getJarsPrefix()); + + // We can connect to the existing cluster. It can speed up testing. The existing cluster won't + // be closed at the end. + if (!properties.getDevelopment().getClusterId().isEmpty()) { + log.info( + "Attaching to the existing cluster [{}]", properties.getDevelopment().getClusterId()); + this.clusterId = properties.getDevelopment().getClusterId(); + } else { + log.info("Creating a new EMR cluster"); + this.clusterId = createNewCluster(properties); + } + } + + /** + * Runs the PySpark job from emr_test_job directory with the given name. Then retrieves the + * emitted events. + */ + List runScript(String scriptName, Map parametersMap) { + Map parametersMapExtended = new HashMap<>(parametersMap); + parametersMapExtended.put("eventsPrefix", properties.getEventsForScriptPrefix(scriptName)); + + String scriptLocalPath = "emr_test_jobs/" + scriptName; + String eventsForScriptPrefix = properties.getEventsForScriptPrefix(scriptName); + String bucketName = properties.getBucketName(); + + String scriptS3Location = + uploadScriptToS3(scriptName, scriptLocalPath, parametersMapExtended, bucketName); + + submitJob(scriptName, bucketName, scriptS3Location); + + return AwsUtils.fetchEventsEmitted(s3Client, bucketName, eventsForScriptPrefix); + } + + /** Renders script template and uploads it to S3 */ + private @NotNull String uploadScriptToS3( + String scriptName, + String scriptLocalPath, + Map parametersMap, + String bucketName) { + String scriptS3Key = properties.getScriptKey(scriptName); + String scriptS3Location = AwsUtils.s3Url(bucketName, scriptS3Key); + log.info("Uploading script [{}] to [{}]", scriptName, scriptS3Location); + AwsUtils.uploadFile( + s3Client, + Templating.renderTemplate(scriptLocalPath, parametersMap), + bucketName, + scriptS3Key); + log.info("The script [{}] has been uploaded to [{}].", scriptLocalPath, scriptS3Location); + return scriptS3Location; + } + + private void submitJob(String scriptName, String bucketName, String scriptS3Location) { + // We attach OpenLineage and S3 transport jars + String jars = + String.join( + ",", + ImmutableList.of( + AwsUtils.s3Url(bucketName, openLineageJarKey), + AwsUtils.s3Url(bucketName, s3TransportJarKey))); + + log.info("Submitting step with the job."); + AddJobFlowStepsResponse addJobFlowStepsResponse = + client.addJobFlowSteps( + AddJobFlowStepsRequest.builder() + .jobFlowId(clusterId) + .steps( + StepConfig.builder() + .name("run-" + scriptName) + .actionOnFailure(ActionOnFailure.CONTINUE) + .hadoopJarStep( + HadoopJarStepConfig.builder() + .jar("command-runner.jar") + .args( + "spark-submit", + "--jars", + jars, + "--conf", + "spark.extraListeners=io.openlineage.spark.agent.OpenLineageSparkListener", + "--conf", + "spark.openlineage.transport.fileNamePrefix=" + + properties.getEventsForScriptPrefix(scriptName), + "--conf", + "spark.openlineage.transport.type=s3", + "--conf", + "spark.openlineage.transport.bucketName=" + bucketName, + scriptS3Location) + .build()) + .build()) + .build()); + String stepId = addJobFlowStepsResponse.stepIds().get(0); + 
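+    // waitForStepToComplete below polls this step ID (DescribeStep via the EMR waiter) until the step finishes.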
log.info("PySpark step submitted with ID [{}]. Waiting for completion.", stepId); + waitForStepToComplete(stepId); + log.info("PySpark step [{}] completed. Fetching events.", stepId); + } + + void waitForStepToComplete(String stepId) { + DescribeStepRequest describeStepRequest = + DescribeStepRequest.builder().clusterId(clusterId).stepId(stepId).build(); + + ResponseOrException matched = + waiter.waitUntilStepComplete(describeStepRequest).matched(); + + matched + .response() + .ifPresent( + response -> { + StepStatus stepStatus = response.step().status(); + log.info("Step [{}] completed with status: [{}]", stepId, stepStatus); + }); + + matched + .exception() + .ifPresent( + e -> { + throw new RuntimeException( + "The step [" + stepId + "] did not complete successfully", e); + }); + } + + public void s3Cleanup() { + if (properties.getDevelopment().isPreventS3Cleanup()) { + log.info( + "The [{}] flag has been enabled. Skipping S3 cleanup. Remember to remove it manually.", + DynamicParameter.PreventS3Cleanup.getTemplateParameter()); + } else { + log.info("Deleting the files under [{}]", properties.getKeyPrefix()); + AwsUtils.deleteFiles(s3Client, properties.getBucketName(), properties.getKeyPrefix()); + } + } + + @Override + public void close() { + // We close the cluster only when the cluster has been created by the tests. + // We can still prevent shutting down if the developer asks so. + if (clusterId != null && properties.getDevelopment().getClusterId() == null) { + if (!properties.getDevelopment().isPreventClusterTermination()) { + client.terminateJobFlows(TerminateJobFlowsRequest.builder().jobFlowIds(clusterId).build()); + waitForClusterTerminated(clusterId); + } else { + log.warn("Preventing shutting down the cluster. Make sure you terminate it manually."); + } + } + log.info("Closing EMR client"); + client.close(); + log.info("Closing S3 client"); + s3Client.close(); + } + + private String createNewCluster(EmrTestEnvironmentProperties properties) { + EmrTestEnvironmentProperties.NewCluster cluster = properties.getCluster(); + RunJobFlowRequest request = + RunJobFlowRequest.builder() + .name("OpenLineageIntegrationTest") + .releaseLabel(cluster.getEmrLabel()) + .applications(Application.builder().name("Spark").build()) + .logUri(AwsUtils.s3Url(properties.getBucketName(), properties.getLogsPrefix())) + .configurations( + Configuration.builder() + .classification("hive-site") + .properties( + ImmutableMap.of( + "hive.metastore.client.factory.class", + "com.amazonaws.glue.catalog.metastore.AWSGlueDataCatalogHiveClientFactory", + "hive.execution.engine", + "spark")) + .build()) + .instances( + JobFlowInstancesConfig.builder() + .instanceCount(1) + .keepJobFlowAliveWhenNoSteps(true) // Cluster doesn't shut down immediately + .masterInstanceType(cluster.getMasterInstanceType()) + .slaveInstanceType(cluster.getSlaveInstanceType()) + .build()) + .jobFlowRole(cluster.getEc2InstanceProfile()) + .serviceRole(cluster.getServiceRole()) + .build(); + String clusterId = client.runJobFlow(request).jobFlowId(); + waitForClusterReady(clusterId); + return clusterId; + } + + private void waitForClusterReady(String clusterId) { + log.info("Waiting for cluster [{}] ready", clusterId); + // The default waiting strategy is to poll the cluster for 30 minutes (max 60 times) with around + // 30 seconds between each attempt until the cluster says it is ready. 
+ ResponseOrException waiterResponse = + waiter + .waitUntilClusterRunning(DescribeClusterRequest.builder().clusterId(clusterId).build()) + .matched(); + + waiterResponse + .response() + .ifPresent( + response -> { + log.info("Cluster [{}] is ready", clusterId); + }); + + waiterResponse + .exception() + .ifPresent( + e -> { + throw new RuntimeException("Cluster didn't reach the expected state", e); + }); + } + + private void waitForClusterTerminated(String clusterId) { + log.info("Terminating cluster {}", clusterId); + // The default waiting strategy is to poll the cluster for 30 minutes (max 60 times) with around + // 30 seconds between each attempt until the cluster says it is ready. + ResponseOrException waiterResponse = + waiter + .waitUntilClusterTerminated( + DescribeClusterRequest.builder().clusterId(clusterId).build()) + .matched(); + + waiterResponse + .response() + .ifPresent( + response -> { + log.info("Cluster [{}] has been terminated", clusterId); + }); + + waiterResponse + .exception() + .ifPresent( + e -> { + throw new RuntimeException("Cluster did not terminate successfully", e); + }); + } +} diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/Templating.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/Templating.java new file mode 100644 index 0000000000..6fae6dc5bd --- /dev/null +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/Templating.java @@ -0,0 +1,47 @@ +/* +/* Copyright 2018-2024 contributors to the OpenLineage project +/* SPDX-License-Identifier: Apache-2.0 +*/ + +package io.openlineage.spark.agent; + +import com.google.common.io.Resources; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.Map; +import lombok.experimental.UtilityClass; +import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.NotNull; + +@Slf4j +@UtilityClass +public class Templating { + + public String renderTemplate(String templatePath, Map parameters) { + return substituteParameters(parameters, readFileContent(templatePath)); + } + + private static String substituteParameters( + Map parameters, String templateContent) { + String result = templateContent; + + for (Map.Entry entry : parameters.entrySet()) { + result = result.replace("{{" + entry.getKey() + "}}", entry.getValue()); + } + + return result; + } + + private static @NotNull String readFileContent(String templatePath) { + String templateContent; + try { + String path = Resources.getResource(templatePath).getPath(); + log.debug("Reading template file: {}", path); + templateContent = new String(Files.readAllBytes(Paths.get(path))); + } catch (IOException e) { + throw new RuntimeException(e); + } + return templateContent; + } +} diff --git a/integration/spark/app/src/test/resources/emr_test_jobs/basic_script.py b/integration/spark/app/src/test/resources/emr_test_jobs/basic_script.py new file mode 100644 index 0000000000..c52b370869 --- /dev/null +++ b/integration/spark/app/src/test/resources/emr_test_jobs/basic_script.py @@ -0,0 +1,31 @@ +# Copyright 2018-2024 contributors to the OpenLineage project +# SPDX-License-Identifier: Apache-2.0 + +from pyspark.sql import SparkSession +from time import sleep + +# Parameter provisioned by the templating system +BUCKET_NAME = "{{bucketName}}" +OUTPUT_PREFIX = "{{outputPrefix}}" +NAMESPACE = "{{namespace}}" + +spark = ( + SparkSession.builder.config("spark.openlineage.namespace", NAMESPACE).appName("emr_example").getOrCreate() +) + +people_df = spark.createDataFrame( + [ 
+ (1, "John", "Smith", 49), + (2, "Mary", "Brown", 12), + (3, "Tom", "White", 51), + (4, "Bruce", "Willis", 18), + (5, "Jason", "Mane", 22), + ], + ["id", "first_name", "last_name", "age"], +) + +people_df.write.mode("overwrite").format("parquet").option( + "path", f"s3://{BUCKET_NAME}/{OUTPUT_PREFIX}/emr_test_script/test" +).saveAsTable("destination_table") + +sleep(3) From 0b9e5ed644e639d4836187d502443148ad504d51 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 14 Oct 2024 07:53:51 +0200 Subject: [PATCH 10/89] Bump the integration-spark group in /integration/spark with 2 updates (#3152) Bumps the integration-spark group in /integration/spark with 2 updates: software.amazon.awssdk:bom and [plugin.serialization](https://github.com/JetBrains/kotlin). Updates `software.amazon.awssdk:bom` from 2.28.11 to 2.28.21 Updates `plugin.serialization` from 2.0.20 to 2.0.21 - [Release notes](https://github.com/JetBrains/kotlin/releases) - [Changelog](https://github.com/JetBrains/kotlin/blob/v2.0.21/ChangeLog.md) - [Commits](https://github.com/JetBrains/kotlin/compare/v2.0.20...v2.0.21) --- updated-dependencies: - dependency-name: software.amazon.awssdk:bom dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: plugin.serialization dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- integration/spark/app/build.gradle | 2 +- integration/spark/buildSrc/build.gradle.kts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/integration/spark/app/build.gradle b/integration/spark/app/build.gradle index 76508ad1e5..339d13a5e7 100644 --- a/integration/spark/app/build.gradle +++ b/integration/spark/app/build.gradle @@ -124,7 +124,7 @@ dependencies { exclude group: 'com.fasterxml.jackson.module' } - testImplementation(platform("software.amazon.awssdk:bom:2.28.11")) + testImplementation(platform("software.amazon.awssdk:bom:2.28.21")) testImplementation("software.amazon.awssdk:auth") testImplementation("software.amazon.awssdk:emr") testImplementation("software.amazon.awssdk:s3") diff --git a/integration/spark/buildSrc/build.gradle.kts b/integration/spark/buildSrc/build.gradle.kts index 5400f2d4e5..f2ef7d9bdf 100644 --- a/integration/spark/buildSrc/build.gradle.kts +++ b/integration/spark/buildSrc/build.gradle.kts @@ -1,6 +1,6 @@ plugins { `kotlin-dsl` - kotlin("plugin.serialization") version "2.0.20" + kotlin("plugin.serialization") version "2.0.21" } repositories { From 4ba3dd27fa085b5374e8c555b5fa07b659a422c6 Mon Sep 17 00:00:00 2001 From: "pawel.leszczynski" Date: Tue, 15 Oct 2024 13:42:52 +0200 Subject: [PATCH 11/89] [docs] fix argos config to run on the main repo (#3154) Signed-off-by: Pawel Leszczynski --- .../workflows/visual-difference-detection.yml | 6 ++++++ 1 file changed, 6 insertions(+) rename {website/.github => .github}/workflows/visual-difference-detection.yml (97%) diff --git a/website/.github/workflows/visual-difference-detection.yml b/.github/workflows/visual-difference-detection.yml similarity index 97% rename from website/.github/workflows/visual-difference-detection.yml rename to .github/workflows/visual-difference-detection.yml index c27ff3585c..3836e8b204 100644 --- a/website/.github/workflows/visual-difference-detection.yml +++ 
b/.github/workflows/visual-difference-detection.yml @@ -38,6 +38,9 @@ jobs: needs: check-label if: needs.check-label.outputs.visual-comparison-required-label-found == 'true' runs-on: ubuntu-latest + defaults: + run: + working-directory: ./website steps: # We switch to the main branch to take the screenshots - name: Check out repository code @@ -76,6 +79,9 @@ jobs: needs: [check-label, take-screenshots-main] if: needs.check-label.outputs.visual-comparison-required-label-found == 'true' runs-on: ubuntu-latest + defaults: + run: + working-directory: ./website steps: - name: Check out repository code uses: actions/checkout@v4 From 365427ee4f997d9c58f29f4011e9e854a74ef456 Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Wed, 16 Oct 2024 10:29:02 +0200 Subject: [PATCH 12/89] fix: Fix a bunch of warnings (#3158) Signed-off-by: Artur Owczarek --- .../spark/agent/MetastoreHive2Test.java | 2 +- .../spark/agent/MetastoreHive3Test.java | 2 +- .../spark/agent/lifecycle/LibraryTest.java | 2 +- .../spark/streaming/MongoStreamingJob.scala | 48 +++++++++---------- .../io/openlineage/spark/test/RddUnion.scala | 6 +-- 5 files changed, 30 insertions(+), 30 deletions(-) diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/MetastoreHive2Test.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/MetastoreHive2Test.java index c3532dacdd..9a3397b24d 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/MetastoreHive2Test.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/MetastoreHive2Test.java @@ -94,6 +94,6 @@ void IcebergTablesTest() { } public static Dataset executeSql(String query, String... params) { - return spark.sql(String.format(query, params)); + return spark.sql(String.format(query, (Object[]) params)); } } diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/MetastoreHive3Test.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/MetastoreHive3Test.java index ab074613b5..4f08155b36 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/MetastoreHive3Test.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/MetastoreHive3Test.java @@ -95,6 +95,6 @@ void IcebergTablesTest() { } public static Dataset executeSql(String query, String... 
params) { - return spark.sql(String.format(query, params)); + return spark.sql(String.format(query, (Object[]) params)); } } diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/lifecycle/LibraryTest.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/lifecycle/LibraryTest.java index fdbddeb262..49d673ef13 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/lifecycle/LibraryTest.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/lifecycle/LibraryTest.java @@ -192,7 +192,7 @@ Map stripSchemaURL(Map map) { void testRDDName(SparkSession spark) { JavaSparkContext sc = new JavaSparkContext(spark.sparkContext()); JavaRDD numbers = - sc.parallelize(IntStream.range(1, 100).mapToObj(Integer::new).collect(Collectors.toList())); + sc.parallelize(IntStream.range(1, 100).boxed().collect(Collectors.toList())); numbers.setName("numbers"); JavaRDD transformed = numbers.filter(n -> n > 10 && n < 90).map(i -> i * i).map(String::valueOf); diff --git a/integration/spark/scala-fixtures/src/main/scala/io/openlineage/spark/streaming/MongoStreamingJob.scala b/integration/spark/scala-fixtures/src/main/scala/io/openlineage/spark/streaming/MongoStreamingJob.scala index 13442e8f01..c0343fdc20 100644 --- a/integration/spark/scala-fixtures/src/main/scala/io/openlineage/spark/streaming/MongoStreamingJob.scala +++ b/integration/spark/scala-fixtures/src/main/scala/io/openlineage/spark/streaming/MongoStreamingJob.scala @@ -19,30 +19,30 @@ object MongoStreamingJob { private def runJob(spark: SparkSession): Unit = { try { - val readSchema = new StructType(). - add("_id", DataTypes.StringType). - add("name", DataTypes.StringType). - add("date", DataTypes.TimestampType). - add("location", DataTypes.StringType) - - val sourceStream = spark.readStream. - format("mongodb"). - option("spark.mongodb.change.stream.publish.full.document.only", "true"). - option("spark.mongodb.connection.uri", "mongodb://m1:27017"). - option("spark.mongodb.database", "events"). - option("spark.mongodb.collection", "events"). - option("forceDeleteTempCheckpointLocation", "true"). - option("spark.mongodb.change.stream.change.stream.full.document", "updateLookup"). - schema(readSchema). - load - - sourceStream. - writeStream. - format("console"). - option("truncate", "false"). - trigger(Trigger.ProcessingTime(Duration.ofSeconds(4).toMillis)). - start. 
- awaitTermination(Duration.ofSeconds(30).toMillis) + val readSchema = new StructType() + .add("_id", DataTypes.StringType) + .add("name", DataTypes.StringType) + .add("date", DataTypes.TimestampType) + .add("location", DataTypes.StringType) + + val sourceStream = spark.readStream + .format("mongodb") + .option("spark.mongodb.change.stream.publish.full.document.only", "true") + .option("spark.mongodb.connection.uri", "mongodb://m1:27017") + .option("spark.mongodb.database", "events") + .option("spark.mongodb.collection", "events") + .option("forceDeleteTempCheckpointLocation", "true") + .option("spark.mongodb.change.stream.change.stream.full.document", "updateLookup") + .schema(readSchema) + .load() + + sourceStream + .writeStream + .format("console") + .option("truncate", "false") + .trigger(Trigger.ProcessingTime(Duration.ofSeconds(4).toMillis)) + .start() + .awaitTermination(Duration.ofSeconds(30).toMillis) } catch { case e: Exception => log.error("Caught an exception", e) } finally { diff --git a/integration/spark/scala-fixtures/src/main/scala/io/openlineage/spark/test/RddUnion.scala b/integration/spark/scala-fixtures/src/main/scala/io/openlineage/spark/test/RddUnion.scala index 7e32934953..3a05b960cb 100644 --- a/integration/spark/scala-fixtures/src/main/scala/io/openlineage/spark/test/RddUnion.scala +++ b/integration/spark/scala-fixtures/src/main/scala/io/openlineage/spark/test/RddUnion.scala @@ -8,14 +8,14 @@ package io.openlineage.spark.test import org.apache.spark.sql.{SaveMode, SparkSession} object RddUnion extends App { - val spark = SparkSession.builder.appName("RddUnion").getOrCreate() + val spark = SparkSession.builder().appName("RddUnion").getOrCreate() val sc = spark.sparkContext import spark.implicits._ val dataDir = System.getProperty("data.dir", "/tmp/scala-test") sc - .parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)) + .parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8, 9, 10).toIndexedSeq) .map(a => a.toString) .toDF() .write @@ -23,7 +23,7 @@ object RddUnion extends App { .parquet(s"$dataDir/rdd_input1") sc - .parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)) + .parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8, 9, 10).toIndexedSeq) .map(a => a.toString) .toDF() .write From 2e68de336a567320e4ef68c401e2fadb619f6da0 Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Wed, 16 Oct 2024 14:42:54 +0200 Subject: [PATCH 13/89] fix: Fix Kotlin capitalized function deprecation warning (#3162) Signed-off-by: Artur Owczarek --- .../io/openlineage/gradle/plugin/ScalaVariantDelegate.kt | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/integration/spark/buildSrc/src/main/kotlin/io/openlineage/gradle/plugin/ScalaVariantDelegate.kt b/integration/spark/buildSrc/src/main/kotlin/io/openlineage/gradle/plugin/ScalaVariantDelegate.kt index 6e87e10c3c..4ac109cac0 100644 --- a/integration/spark/buildSrc/src/main/kotlin/io/openlineage/gradle/plugin/ScalaVariantDelegate.kt +++ b/integration/spark/buildSrc/src/main/kotlin/io/openlineage/gradle/plugin/ScalaVariantDelegate.kt @@ -8,7 +8,6 @@ package io.openlineage.gradle.plugin import org.gradle.api.Project import org.gradle.api.tasks.SourceSetContainer import org.gradle.api.tasks.testing.Test -import org.gradle.configurationcache.extensions.capitalized import org.gradle.jvm.tasks.Jar import org.gradle.kotlin.dsl.get import org.gradle.kotlin.dsl.getByType @@ -49,6 +48,9 @@ class ScalaVariantDelegate( configureArtifacts() } + fun CharSequence.capitalized(): String = + toString().replaceFirstChar { if (it.isLowerCase()) it.titlecase() else 
it.toString() } + private fun getSourceSetContainer(target: Project) = target.extensions.getByType() From 0cc6d0078a48099f9b60373d2c4c1204d790753c Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Wed, 16 Oct 2024 17:04:08 +0200 Subject: [PATCH 14/89] use uv to manage maturin in python sqlparser interface (#3161) Signed-off-by: Maciej Obuchowski --- .../sql/iface-py/script/build-macos.sh | 15 +-------------- .../sql/iface-py/script/setup-macos.sh | 19 +++++++------------ 2 files changed, 8 insertions(+), 26 deletions(-) diff --git a/integration/sql/iface-py/script/build-macos.sh b/integration/sql/iface-py/script/build-macos.sh index 39616e4339..f5aa5d6f29 100755 --- a/integration/sql/iface-py/script/build-macos.sh +++ b/integration/sql/iface-py/script/build-macos.sh @@ -7,19 +7,6 @@ # It's assumed that it will be run on MacOS set -e -if [ -x "$(command -v /usr/local/opt/python@3.8/bin/python3)" ]; then - /usr/local/opt/python@3.8/bin/python3 -m venv .env -elif [ -x "$(command -v /usr/local/bin/python3.8)" ]; then - /usr/local/bin/python3.8 -m venv .env -elif [ -x "$(command -v python3.8)" ]; then - python3.8 -m venv .env -else - python -m venv .env -fi - -source .env/bin/activate -source $HOME/.cargo/env - # Disable incremental compilation, since it causes issues. export CARGO_INCREMENTAL=0 @@ -35,7 +22,7 @@ if [[ -d "./iface-py" ]] then cd iface-py fi -maturin build --target universal2-apple-darwin --out target/wheels --release --strip +uv tool run maturin build --target universal2-apple-darwin --out target/wheels --release --strip echo "Package build, trying to import" echo "Platform:" diff --git a/integration/sql/iface-py/script/setup-macos.sh b/integration/sql/iface-py/script/setup-macos.sh index 0dbe22519d..9b9e32b79e 100755 --- a/integration/sql/iface-py/script/setup-macos.sh +++ b/integration/sql/iface-py/script/setup-macos.sh @@ -13,25 +13,20 @@ echo "Installing homebrew" export HOMEBREW_NO_AUTO_UPDATE=1 export HOMEBREW_NO_INSTALL_CLEANUP=1 -echo "Installing Python 3.8" -brew install python@3.8 - echo "Installing Rust" curl https://sh.rustup.rs -sSf | sh -s -- -y +echo "Installing uv" +curl -LsSf https://astral.sh/uv/install.sh | sh source $HOME/.cargo/env rustup target add aarch64-apple-darwin rustup target add x86_64-apple-darwin +echo "Installing Python 3.8" +uv python install 3.8 + # Maturin is build tool that we're using. It can build python wheels based on standard Rust Cargo.toml. 
echo "Installing Maturin" -if [ -x "$(command -v /usr/local/opt/python@3.8/bin/python3)" ]; then - /usr/local/opt/python@3.8/bin/python3 -m pip install maturin -elif [ -x "$(command -v /usr/local/bin/python3.8)" ]; then - /usr/local/bin/python3.8 -m pip install maturin -elif [ -x "$(command -v python3.8)" ]; then - python3.8 -m pip install maturin -else - python -m pip install maturin -fi +echo "$PWD" +(cd iface-py && uv sync --no-install-project) From 040084c7970caca059a9e0ccafd0355477df62aa Mon Sep 17 00:00:00 2001 From: "pawel.leszczynski" Date: Thu, 17 Oct 2024 08:40:02 +0200 Subject: [PATCH 15/89] fix databricks naming mismatch in documentation (#3163) Signed-off-by: Pawel Leszczynski --- website/docs/spec/naming.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/spec/naming.md b/website/docs/spec/naming.md index 3cbf2c802a..c491d8fed6 100644 --- a/website/docs/spec/naming.md +++ b/website/docs/spec/naming.md @@ -29,7 +29,7 @@ A dataset, or `table`, is organized according to a producer, namespace, database | Snowflake | Warehouse | snowflake://{organization name}-{account name} | {database}.{schema}.{table} | | Trino | Warehouse | trino://{host}:{port} | {catalog}.{schema}.{table} | | ABFSS (Azure Data Lake Gen2) | Data lake | abfss://{container name}@{service name}.dfs.core.windows.net | {path} | -| DBFS (Databricks File System) | Distributed file system | hdfs://{workspace name} | {path} | +| DBFS (Databricks File System) | Distributed file system | dbfs://{workspace name} | {path} | | GCS | Blob storage | gs://{bucket name} | {object key} | | HDFS | Distributed file system | hdfs://{namenode host}:{namenode port} | {path} | | Kafka | distributed event streaming platform | kafka://{bootstrap server host}:{port} | {topic} | From 3068639d0c7b742f8d6394209923d970d8109d9f Mon Sep 17 00:00:00 2001 From: ddebowczyk92 Date: Thu, 17 Oct 2024 10:28:15 +0200 Subject: [PATCH 16/89] Rename Dataplex transport to GcpLineage (#3156) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Dominik Dębowczyk --- .circleci/continue_config.yml | 10 +++--- client/java/settings.gradle | 2 +- ...lineage.client.transports.TransportBuilder | 1 - .../README.md | 10 +++--- .../build.gradle | 2 +- .../gcplineage/GcpLineageTransport.java} | 31 +++++++++--------- .../GcpLineageTransportBuilder.java} | 12 +++---- .../GcpLineageTransportConfig.java} | 9 +++--- ...lineage.client.transports.TransportBuilder | 1 + .../gcplineage/GcpLineageTransportTest.java} | 28 ++++++++-------- .../client/java/partials/java_transport.md | 32 +++++++++---------- 11 files changed, 70 insertions(+), 68 deletions(-) delete mode 100644 client/java/transports-dataplex/src/main/resources/META-INF/services/io.openlineage.client.transports.TransportBuilder rename client/java/{transports-dataplex => transports-gcplineage}/README.md (86%) rename client/java/{transports-dataplex => transports-gcplineage}/build.gradle (91%) rename client/java/{transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexTransport.java => transports-gcplineage/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransport.java} (87%) rename client/java/{transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexTransportBuilder.java => transports-gcplineage/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransportBuilder.java} (64%) rename 
client/java/{transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexConfig.java => transports-gcplineage/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransportConfig.java} (77%) create mode 100644 client/java/transports-gcplineage/src/main/resources/META-INF/services/io.openlineage.client.transports.TransportBuilder rename client/java/{transports-dataplex/src/test/java/io/openlineage/client/transports/dataplex/DataplexTransportTest.java => transports-gcplineage/src/test/java/io/openlineage/client/transports/gcplineage/GcpLineageTransportTest.java} (81%) diff --git a/.circleci/continue_config.yml b/.circleci/continue_config.yml index cd50fe766c..bffa003ecb 100644 --- a/.circleci/continue_config.yml +++ b/.circleci/continue_config.yml @@ -462,7 +462,7 @@ jobs: - store_test_results: path: build/test-results/test - store_test_results: - path: transports-dataplex/build/test-results/test + path: transports-gcplineage/build/test-results/test - store_test_results: path: transports-gcs/build/test-results/test - store_test_results: @@ -471,7 +471,7 @@ jobs: path: build/reports/tests/test destination: test-report - store_artifacts: - path: transports-dataplex/build/reports/tests/test + path: transports-gcplineage/build/reports/tests/test destination: test-report - store_artifacts: path: transports-gcs/build/reports/tests/test @@ -486,7 +486,7 @@ jobs: path: build/libs destination: libs - store_artifacts: - path: transports-dataplex/build/libs + path: transports-gcplineage/build/libs destination: libs - store_artifacts: path: transports-gcs/build/libs @@ -517,8 +517,8 @@ jobs: path: ./build/libs destination: java-client-artifacts - store_artifacts: - path: ./transports-dataplex/build/libs - destination: transports-dataplex-artifacts + path: ./transports-gcplineage/build/libs + destination: transports-gcplineage-artifacts - store_artifacts: path: ./transports-gcs/build/libs destination: transports-gcs-artifacts diff --git a/client/java/settings.gradle b/client/java/settings.gradle index 25bf7109c1..20ce2cf992 100644 --- a/client/java/settings.gradle +++ b/client/java/settings.gradle @@ -6,7 +6,7 @@ rootProject.name = 'openlineage-java' include('generator') -include('transports-dataplex') +include('transports-gcplineage') include('transports-gcs') include('transports-s3') diff --git a/client/java/transports-dataplex/src/main/resources/META-INF/services/io.openlineage.client.transports.TransportBuilder b/client/java/transports-dataplex/src/main/resources/META-INF/services/io.openlineage.client.transports.TransportBuilder deleted file mode 100644 index 1d7f57f705..0000000000 --- a/client/java/transports-dataplex/src/main/resources/META-INF/services/io.openlineage.client.transports.TransportBuilder +++ /dev/null @@ -1 +0,0 @@ -io.openlineage.client.transports.dataplex.DataplexTransportBuilder \ No newline at end of file diff --git a/client/java/transports-dataplex/README.md b/client/java/transports-gcplineage/README.md similarity index 86% rename from client/java/transports-dataplex/README.md rename to client/java/transports-gcplineage/README.md index 756823869b..92ed32c759 100644 --- a/client/java/transports-dataplex/README.md +++ b/client/java/transports-gcplineage/README.md @@ -1,8 +1,8 @@ -# Google Cloud Platform Dataplex Transport +# Google Cloud Platform GcpLineage Transport This library provides a transport layer that integrates OpenLineage with Google Cloud Platform's Dataplex service. 
It wraps the `com.google.cloud.datalineage:producerclient-java8` library into an OpenLineage transport, allowing you to -emit lineage events directly to Dataplex using `gRPC` channel. +emit lineage events directly to GCP Lineage service using `gRPC` channel. ## Getting Started @@ -19,14 +19,14 @@ be emitted correctly. io.openlineage - transports-dataplex + transports-gcplineage YOUR_VERSION_HERE ``` ### Configuration -- `type` - string, must be `"dataplex"`. Required. +- `type` - string, must be `"gcplineage"`. Required. - `endpoint` - string, specifies the endpoint to which events are sent, default value is `datalineage.googleapis.com:443`. Optional. - `projectId` - string, the project quota identifier. If not provided, it is determined based on user credentials. @@ -39,7 +39,7 @@ be emitted correctly. Optional, if not provided [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) are used -- `mode` - enum that specifies the type of client used for publishing OpenLineage events to Dataplex. Possible values: +- `mode` - enum that specifies the type of client used for publishing OpenLineage events to GCP Lineage service. Possible values: `sync` (synchronous) or `async` (asynchronous). Optional, default: `async`. ### Behavior diff --git a/client/java/transports-dataplex/build.gradle b/client/java/transports-gcplineage/build.gradle similarity index 91% rename from client/java/transports-dataplex/build.gradle rename to client/java/transports-gcplineage/build.gradle index 1d96ada927..58b834b572 100644 --- a/client/java/transports-dataplex/build.gradle +++ b/client/java/transports-gcplineage/build.gradle @@ -19,7 +19,7 @@ plugins { } ext { - projectDescription = "GCP Dataplex OpenLineage transport library" + projectDescription = "GcpLineage OpenLineage transport library" } dependencies { diff --git a/client/java/transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexTransport.java b/client/java/transports-gcplineage/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransport.java similarity index 87% rename from client/java/transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexTransport.java rename to client/java/transports-gcplineage/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransport.java index cc2855b418..ca3f840fda 100644 --- a/client/java/transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexTransport.java +++ b/client/java/transports-gcplineage/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransport.java @@ -2,7 +2,7 @@ /* Copyright 2018-2024 contributors to the OpenLineage project /* SPDX-License-Identifier: Apache-2.0 */ -package io.openlineage.client.transports.dataplex; +package io.openlineage.client.transports.gcplineage; import com.google.api.core.ApiFuture; import com.google.api.core.ApiFutureCallback; @@ -36,15 +36,15 @@ import lombok.extern.slf4j.Slf4j; @Slf4j -public class DataplexTransport extends Transport { +public class GcpLineageTransport extends Transport { private final ProducerClientWrapper producerClientWrapper; - public DataplexTransport(@NonNull DataplexConfig config) throws IOException { + public GcpLineageTransport(@NonNull GcpLineageTransportConfig config) throws IOException { this(new ProducerClientWrapper(config)); } - protected DataplexTransport(@NonNull ProducerClientWrapper client) throws IOException { + protected GcpLineageTransport(@NonNull 
ProducerClientWrapper client) throws IOException { this.producerClientWrapper = client; } @@ -73,9 +73,9 @@ static class ProducerClientWrapper implements Closeable { private final AsyncLineageClient asyncLineageClient; private final String parent; - protected ProducerClientWrapper(DataplexConfig config) throws IOException { + protected ProducerClientWrapper(GcpLineageTransportConfig config) throws IOException { LineageSettings settings; - if (DataplexConfig.Mode.sync == config.getMode()) { + if (GcpLineageTransportConfig.Mode.sync == config.getMode()) { settings = createSyncSettings(config); syncLineageClient = SyncLineageProducerClient.create((SyncLineageProducerClientSettings) settings); @@ -89,14 +89,14 @@ protected ProducerClientWrapper(DataplexConfig config) throws IOException { this.parent = getParent(config, settings); } - protected ProducerClientWrapper(DataplexConfig config, SyncLineageClient client) + protected ProducerClientWrapper(GcpLineageTransportConfig config, SyncLineageClient client) throws IOException { this.syncLineageClient = client; this.parent = getParent(config, createAsyncSettings(config)); this.asyncLineageClient = null; } - protected ProducerClientWrapper(DataplexConfig config, AsyncLineageClient client) + protected ProducerClientWrapper(GcpLineageTransportConfig config, AsyncLineageClient client) throws IOException { this.asyncLineageClient = client; this.parent = getParent(config, createSyncSettings(config)); @@ -140,29 +140,30 @@ public void onSuccess(ProcessOpenLineageRunEventResponse result) { ApiFutures.addCallback(future, callback, MoreExecutors.directExecutor()); } - private String getParent(DataplexConfig config, LineageSettings settings) throws IOException { + private String getParent(GcpLineageTransportConfig config, LineageSettings settings) + throws IOException { return String.format( "projects/%s/locations/%s", getProjectId(config, settings), config.getLocation() != null ? 
config.getLocation() : "us"); } - private static SyncLineageProducerClientSettings createSyncSettings(DataplexConfig config) - throws IOException { + private static SyncLineageProducerClientSettings createSyncSettings( + GcpLineageTransportConfig config) throws IOException { SyncLineageProducerClientSettings.Builder builder = SyncLineageProducerClientSettings.newBuilder(); return createSettings(config, builder).build(); } - private static AsyncLineageProducerClientSettings createAsyncSettings(DataplexConfig config) - throws IOException { + private static AsyncLineageProducerClientSettings createAsyncSettings( + GcpLineageTransportConfig config) throws IOException { AsyncLineageProducerClientSettings.Builder builder = AsyncLineageProducerClientSettings.newBuilder(); return createSettings(config, builder).build(); } private static T createSettings( - DataplexConfig config, T builder) throws IOException { + GcpLineageTransportConfig config, T builder) throws IOException { if (config.getEndpoint() != null) { builder.setEndpoint(config.getEndpoint()); } @@ -179,7 +180,7 @@ private static T createSettings( return builder; } - private static String getProjectId(DataplexConfig config, LineageSettings settings) + private static String getProjectId(GcpLineageTransportConfig config, LineageSettings settings) throws IOException { if (config.getProjectId() != null) { return config.getProjectId(); diff --git a/client/java/transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexTransportBuilder.java b/client/java/transports-gcplineage/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransportBuilder.java similarity index 64% rename from client/java/transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexTransportBuilder.java rename to client/java/transports-gcplineage/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransportBuilder.java index 2385369a2a..319ecf8fdb 100644 --- a/client/java/transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexTransportBuilder.java +++ b/client/java/transports-gcplineage/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransportBuilder.java @@ -2,7 +2,7 @@ /* Copyright 2018-2024 contributors to the OpenLineage project /* SPDX-License-Identifier: Apache-2.0 */ -package io.openlineage.client.transports.dataplex; +package io.openlineage.client.transports.gcplineage; import io.openlineage.client.OpenLineageClientException; import io.openlineage.client.transports.Transport; @@ -10,24 +10,24 @@ import io.openlineage.client.transports.TransportConfig; import java.io.IOException; -public class DataplexTransportBuilder implements TransportBuilder { +public class GcpLineageTransportBuilder implements TransportBuilder { @Override public String getType() { - return "dataplex"; + return "gcplineage"; } @Override public TransportConfig getConfig() { - return new DataplexConfig(); + return new GcpLineageTransportConfig(); } @Override public Transport build(TransportConfig config) { try { - return new DataplexTransport((DataplexConfig) config); + return new GcpLineageTransport((GcpLineageTransportConfig) config); } catch (IOException e) { throw new OpenLineageClientException( - "An exception occurred while creating a DataplexTransport", e); + "An exception occurred while creating a GcpLineageTransport", e); } } } diff --git a/client/java/transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexConfig.java 
b/client/java/transports-gcplineage/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransportConfig.java similarity index 77% rename from client/java/transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexConfig.java rename to client/java/transports-gcplineage/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransportConfig.java index ef39273df5..8fee8fcb6b 100644 --- a/client/java/transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexConfig.java +++ b/client/java/transports-gcplineage/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransportConfig.java @@ -2,7 +2,7 @@ /* Copyright 2018-2024 contributors to the OpenLineage project /* SPDX-License-Identifier: Apache-2.0 */ -package io.openlineage.client.transports.dataplex; +package io.openlineage.client.transports.gcplineage; import io.openlineage.client.MergeConfig; import io.openlineage.client.transports.TransportConfig; @@ -16,7 +16,8 @@ @NoArgsConstructor @AllArgsConstructor @ToString -public class DataplexConfig implements TransportConfig, MergeConfig { +public class GcpLineageTransportConfig + implements TransportConfig, MergeConfig { enum Mode { sync, @@ -34,8 +35,8 @@ enum Mode { @Getter @Setter private @Nullable Mode mode; @Override - public DataplexConfig mergeWithNonNull(DataplexConfig other) { - return new DataplexConfig( + public GcpLineageTransportConfig mergeWithNonNull(GcpLineageTransportConfig other) { + return new GcpLineageTransportConfig( mergePropertyWith(endpoint, other.endpoint), mergePropertyWith(projectId, other.projectId), mergePropertyWith(credentialsFile, other.credentialsFile), diff --git a/client/java/transports-gcplineage/src/main/resources/META-INF/services/io.openlineage.client.transports.TransportBuilder b/client/java/transports-gcplineage/src/main/resources/META-INF/services/io.openlineage.client.transports.TransportBuilder new file mode 100644 index 0000000000..c6250235dd --- /dev/null +++ b/client/java/transports-gcplineage/src/main/resources/META-INF/services/io.openlineage.client.transports.TransportBuilder @@ -0,0 +1 @@ +io.openlineage.client.transports.gcplineage.GcpLineageTransportBuilder \ No newline at end of file diff --git a/client/java/transports-dataplex/src/test/java/io/openlineage/client/transports/dataplex/DataplexTransportTest.java b/client/java/transports-gcplineage/src/test/java/io/openlineage/client/transports/gcplineage/GcpLineageTransportTest.java similarity index 81% rename from client/java/transports-dataplex/src/test/java/io/openlineage/client/transports/dataplex/DataplexTransportTest.java rename to client/java/transports-gcplineage/src/test/java/io/openlineage/client/transports/gcplineage/GcpLineageTransportTest.java index c66af9c84c..379d0d36cb 100644 --- a/client/java/transports-dataplex/src/test/java/io/openlineage/client/transports/dataplex/DataplexTransportTest.java +++ b/client/java/transports-gcplineage/src/test/java/io/openlineage/client/transports/gcplineage/GcpLineageTransportTest.java @@ -2,7 +2,7 @@ /* Copyright 2018-2024 contributors to the OpenLineage project /* SPDX-License-Identifier: Apache-2.0 */ -package io.openlineage.client.transports.dataplex; +package io.openlineage.client.transports.gcplineage; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.mock; @@ -26,20 +26,20 @@ import java.util.UUID; import org.junit.jupiter.api.Test; -class DataplexTransportTest { +class GcpLineageTransportTest { @Test 
void clientEmitsRunEventGCPTransportSyncMode() throws Exception { SyncLineageProducerClient syncClient = mock(SyncLineageProducerClient.class); - DataplexConfig config = new DataplexConfig(); + GcpLineageTransportConfig config = new GcpLineageTransportConfig(); config.setProjectId("my-project"); config.setLocation("us"); - DataplexTransport.ProducerClientWrapper clientWrapper = - new DataplexTransport.ProducerClientWrapper(config, syncClient); + GcpLineageTransport.ProducerClientWrapper clientWrapper = + new GcpLineageTransport.ProducerClientWrapper(config, syncClient); - Transport transport = new DataplexTransport(clientWrapper); + Transport transport = new GcpLineageTransport(clientWrapper); OpenLineageClient client = new OpenLineageClient(transport); OpenLineage.RunEvent event = runEvent(); @@ -59,15 +59,15 @@ void clientEmitsRunEventGCPTransportSyncMode() throws Exception { @Test void clientEmitsRunEventGCPTransportAsyncMode() throws Exception { AsyncLineageProducerClient asyncClient = mock(AsyncLineageProducerClient.class); - DataplexConfig config = new DataplexConfig(); + GcpLineageTransportConfig config = new GcpLineageTransportConfig(); config.setProjectId("my-project"); config.setLocation("us"); - DataplexTransport.ProducerClientWrapper clientWrapper = - new DataplexTransport.ProducerClientWrapper(config, asyncClient); + GcpLineageTransport.ProducerClientWrapper clientWrapper = + new GcpLineageTransport.ProducerClientWrapper(config, asyncClient); - Transport transport = new DataplexTransport(clientWrapper); + Transport transport = new GcpLineageTransport(clientWrapper); OpenLineageClient client = new OpenLineageClient(transport); OpenLineage.RunEvent event = runEvent(); @@ -86,14 +86,14 @@ void clientEmitsRunEventGCPTransportAsyncMode() throws Exception { @Test void gcpTransportRaisesOnException() throws Exception { AsyncLineageProducerClient async = mock(AsyncLineageProducerClient.class); - DataplexConfig config = new DataplexConfig(); + GcpLineageTransportConfig config = new GcpLineageTransportConfig(); config.setProjectId("my-project"); config.setLocation("us"); - DataplexTransport.ProducerClientWrapper clientWrapper = - new DataplexTransport.ProducerClientWrapper(config, async); + GcpLineageTransport.ProducerClientWrapper clientWrapper = + new GcpLineageTransport.ProducerClientWrapper(config, async); - Transport transport = new DataplexTransport(clientWrapper); + Transport transport = new GcpLineageTransport(clientWrapper); OpenLineageClient client = new OpenLineageClient(transport); OpenLineage.RunEvent event = runEvent(); diff --git a/website/docs/client/java/partials/java_transport.md b/website/docs/client/java/partials/java_transport.md index 7b708f4d8b..2c3a26479d 100644 --- a/website/docs/client/java/partials/java_transport.md +++ b/website/docs/client/java/partials/java_transport.md @@ -704,15 +704,15 @@ OpenLineageClient client = OpenLineageClient.builder() -### [Dataplex](https://github.com/OpenLineage/OpenLineage/blob/main/client/transports-dataplex/src/main/java/io/openlineage/client/transports/dataplex/DataplexTransport.java) +### [GcpLineage](https://github.com/OpenLineage/OpenLineage/blob/main/client/transports-dataplex/src/main/java/io/openlineage/client/transports/gcplineage/GcpLineageTransport.java) -To use this transport in your project, you need to include `io.openlineage:transports-dataplex` artifact in +To use this transport in your project, you need to include `io.openlineage:transports-gcplineage` artifact in your build configuration. 
This is particularly important for environments like `Spark`, where this transport must be on the classpath for lineage events to be emitted correctly. #### Configuration -- `type` - string, must be `"dataplex"`. Required. +- `type` - string, must be `"gcplineage"`. Required. - `endpoint` - string, specifies the endpoint to which events are sent, default value is `datalineage.googleapis.com:443`. Optional. - `projectId` - string, the project quota identifier. If not provided, it is determined based on user credentials. @@ -725,12 +725,12 @@ the classpath for lineage events to be emitted correctly. Optional, if not provided [Application Default Credentials](https://cloud.google.com/docs/authentication/application-default-credentials) are used -- `mode` - enum that specifies the type of client used for publishing OpenLineage events to Dataplex. Possible values: +- `mode` - enum that specifies the type of client used for publishing OpenLineage events to GCP Lineage service. Possible values: `sync` (synchronous) or `async` (asynchronous). Optional, default: `async`. #### Behavior -- Events are serialized to JSON, included as part of a `gRPC` request, and then dispatched to the `Dataplex` endpoint. +- Events are serialized to JSON, included as part of a `gRPC` request, and then dispatched to the `GCP Lineage service` endpoint. - Depending on the `mode` chosen, requests are sent using either a synchronous or asynchronous client. #### Examples @@ -740,7 +740,7 @@ the classpath for lineage events to be emitted correctly. ```yaml transport: - type: dataplex + type: gcplineage projectId: your_gcp_project_id location: us mode: sync @@ -751,7 +751,7 @@ transport: ```ini -spark.openlineage.transport.type=dataplex +spark.openlineage.transport.type=gcplineage spark.openlineage.transport.projectId=your_gcp_project_id spark.openlineage.transport.location=us spark.openlineage.transport.mode=sync @@ -762,7 +762,7 @@ spark.openlineage.transport.credentialsFile=path/to/credentials.json ```ini -openlineage.transport.type=dataplex +openlineage.transport.type=gcplineage openlineage.transport.projectId=your_gcp_project_id openlineage.transport.location=us openlineage.transport.mode=sync @@ -774,20 +774,20 @@ openlineage.transport.credentialsFile=path/to/credentials.json ```java import io.openlineage.client.OpenLineageClient; -import io.openlineage.client.transports.dataplex.DataplexConfig; -import io.openlineage.client.transports.dataplex.DataplexTransport; +import io.openlineage.client.transports.gcplineage.GcpLineageTransportConfig; +import io.openlineage.client.transports.dataplex.GcpLineageTransport; -DataplexConfig dataplexConfig = new DataplexConfig(); +GcpLineageTransportConfig gcplineageConfig = new GcpLineageTransportConfig(); -dataplexConfig.setProjectId("your_kinesis_stream_name"); -dataplexConfig.setLocation("your_aws_region"); -dataplexConfig.setMode("sync"); -dataplexConfig.setCredentialsFile("path/to/credentials.json"); +gcplineageConfig.setProjectId("your_gcp_project_id"); +gcplineageConfig.setLocation("your_gcp_location"); +gcplineageConfig.setMode("sync"); +gcplineageConfig.setCredentialsFile("path/to/credentials.json"); OpenLineageClient client = OpenLineageClient.builder() .transport( - new DataplexTransport(dataplexConfig)) + new GcpLineageTransport(gcplineageConfig)) .build(); ``` From 5b971e6a5137d6668d7e7dfa8eb5367d9c37e8d6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Oct 2024 10:56:47 +0200 Subject: [PATCH 17/89] Bump 
cookie and express in /website (#3155) Bumps [cookie](https://github.com/jshttp/cookie) and [express](https://github.com/expressjs/express). These dependencies needed to be updated together. Updates `cookie` from 0.6.0 to 0.7.1 - [Release notes](https://github.com/jshttp/cookie/releases) - [Commits](https://github.com/jshttp/cookie/compare/v0.6.0...v0.7.1) Updates `express` from 4.21.0 to 4.21.1 - [Release notes](https://github.com/expressjs/express/releases) - [Changelog](https://github.com/expressjs/express/blob/4.21.1/History.md) - [Commits](https://github.com/expressjs/express/compare/4.21.0...4.21.1) --- updated-dependencies: - dependency-name: cookie dependency-type: indirect - dependency-name: express dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- website/package-lock.json | 28 ++++++++++++++-------------- website/yarn.lock | 16 ++++++++-------- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/website/package-lock.json b/website/package-lock.json index 413c71fd21..c66fd50cad 100644 --- a/website/package-lock.json +++ b/website/package-lock.json @@ -6831,9 +6831,9 @@ } }, "node_modules/cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", "engines": { "node": ">= 0.6" } @@ -8357,16 +8357,16 @@ } }, "node_modules/express": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz", - "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==", + "version": "4.21.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.1.tgz", + "integrity": "sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ==", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.6.0", + "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", @@ -22642,9 +22642,9 @@ } }, "cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==" + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==" }, "cookie-signature": { "version": "1.0.6", @@ -23626,16 +23626,16 @@ "dev": true }, "express": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz", - "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==", + "version": "4.21.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.1.tgz", + "integrity": "sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ==", "requires": { "accepts": "~1.3.8", "array-flatten": "1.1.1", "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.6.0", + 
"cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", diff --git a/website/yarn.lock b/website/yarn.lock index 86116ee940..dbd907baae 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -3831,10 +3831,10 @@ cookie-signature@1.0.6: resolved "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz" integrity sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ== -cookie@0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051" - integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw== +cookie@0.7.1: + version "0.7.1" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.1.tgz#2f73c42142d5d5cf71310a74fc4ae61670e5dbc9" + integrity sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w== copy-text-to-clipboard@^3.0.1: version "3.0.1" @@ -4726,16 +4726,16 @@ expand-template@^2.0.3: integrity sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg== express@^4.17.3: - version "4.21.0" - resolved "https://registry.yarnpkg.com/express/-/express-4.21.0.tgz#d57cb706d49623d4ac27833f1cbc466b668eb915" - integrity sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng== + version "4.21.1" + resolved "https://registry.yarnpkg.com/express/-/express-4.21.1.tgz#9dae5dda832f16b4eec941a4e44aa89ec481b281" + integrity sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ== dependencies: accepts "~1.3.8" array-flatten "1.1.1" body-parser "1.20.3" content-disposition "0.5.4" content-type "~1.0.4" - cookie "0.6.0" + cookie "0.7.1" cookie-signature "1.0.6" debug "2.6.9" depd "2.0.0" From 386e32a492689761153a7a639341f834fb43b09c Mon Sep 17 00:00:00 2001 From: "Sheeri K. Cabral" Date: Thu, 17 Oct 2024 09:43:29 -0400 Subject: [PATCH 18/89] changing from 2nd wed to 3rd (#3166) Signed-off-by: Sheeri K. Cabral --- website/src/pages/meetings/index.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/src/pages/meetings/index.mdx b/website/src/pages/meetings/index.mdx index 2a37016c94..d482ae0f0d 100644 --- a/website/src/pages/meetings/index.mdx +++ b/website/src/pages/meetings/index.mdx @@ -17,7 +17,7 @@ TSC Meetings

-The OpenLineage Technical Steering Committee meets monthly on the second Wednesday from 9:30am to 10:30am US Pacific.
+The OpenLineage Technical Steering Committee meets monthly on the third Wednesday from 9:30am to 10:30am US Pacific.

TSC meetings are open to all who RSVP. During them, we review recent releases, hear from contributors of major new developments, and feature guest speakers on various topics of interest to the community.

These meetings take place on Zoom. Meetings are recorded and published on the OpenLineage YouTube Channel. Notes for the meeting are published in a page on the OpenLineage Wiki.

To RSVP, select a meeting from the list or click this link (or the Subscribe link at the top of the list) to be invited to the complete series. Once you RSVP, you will receive a calendar invite with the video meeting link and password.

From 65931fbe654be5b31cd1dc0e7dd7e298a929ef5f Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Thu, 17 Oct 2024 15:43:43 +0200 Subject: [PATCH 19/89] Fix/emr implicit glue catalog (#3147) * test: Add integration tests for EMR * Spark integration has integration tests for EMR Signed-off-by: Artur Owczarek * fix: EMR should determine implicit Glue catalog id while producing symlinks * AWS Glue symlinks in EMR for implicit catalog id are correct ARNs Signed-off-by: Artur Owczarek --------- Signed-off-by: Artur Owczarek --- .../workflows/integration-tests-spark-aws.yml | 52 ++++++++ integration/spark/app/build.gradle | 3 + .../io/openlineage/spark/agent/AwsUtils.java | 59 ++++++++- .../spark/agent/DynamicParameter.java | 16 +++ .../spark/agent/EmrIntegrationTest.java | 112 +++++++++++++++-- .../spark/agent/EmrTestEnvironment.java | 54 +++++--- .../glue_symlink_implicit_account_id.py | 54 ++++++++ integration/spark/build.gradle | 6 + integration/spark/shared/build.gradle | 12 +- .../spark/agent/util/AwsAccountIdFetcher.java | 40 ++++++ .../spark/agent/util/AwsUtils.java | 119 ++++++++++++++++++ .../spark/agent/util/PathUtils.java | 52 +------- .../plan/catalog/IcebergHandler.java | 3 +- 13 files changed, 500 insertions(+), 82 deletions(-) create mode 100644 .github/workflows/integration-tests-spark-aws.yml create mode 100644 integration/spark/app/src/test/resources/emr_test_jobs/glue_symlink_implicit_account_id.py create mode 100644 integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/AwsAccountIdFetcher.java create mode 100644 integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/AwsUtils.java diff --git a/.github/workflows/integration-tests-spark-aws.yml b/.github/workflows/integration-tests-spark-aws.yml new file mode 100644 index 0000000000..6c4b414f6a --- /dev/null +++ b/.github/workflows/integration-tests-spark-aws.yml @@ -0,0 +1,52 @@ +name: Integration tests for Spark AWS + +# Runs the AWS integration tests +# +# The workflow is triggered manually. To run, you have to install GitHub CLI, setup secrets and run: +# gh workflow run "Integration tests for Spark AWS" --ref +# +# The required secrets are: +# - EMR_TESTS_AWS_ACCESS_KEY_ID +# - EMR_TESTS_AWS_SECRET_ACCESS_KEY +# - EMR_TESTS_AWS_REGION +# - EMR_TESTS_BUCKET_NAME +# - EMR_TESTS_EC2_SUBNET_ID +# +# For local development use the command as in the step "Run AWS integration tests" below. You can specify +# more parameters. See class DynamicParameters and EmrIntegrationTest in Spark integration for details. 
+ +on: + workflow_dispatch: + +defaults: + run: + working-directory: integration/spark + +jobs: + run-integration-tests-emr: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-access-key-id: ${{ secrets.EMR_TESTS_AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.EMR_TESTS_AWS_SECRET_ACCESS_KEY }} + aws-region: ${{ secrets.EMR_TESTS_AWS_REGION }} + - uses: actions/setup-java@v4 + with: + distribution: 'corretto' + java-version: '17' + - name: Build Spark integration dependencies + run: ./buildDependencies.sh + - name: Build Spark integration jar + run: ./gradlew shadowJar -x test -Pjava.compile.home=${JAVA_HOME} + - name: Run AWS integration tests + run: ./gradlew awsIntegrationTest --info -x test -Pjava.compile.home=${JAVA_HOME} -Dopenlineage.tests.bucketName=${{ secrets.EMR_TESTS_BUCKET_NAME }} -Dopenlineage.tests.ec2SubnetId=${{ secrets.EMR_TESTS_EC2_SUBNET_ID }} + - name: Upload test results + uses: actions/upload-artifact@v4 + if: always() + with: + name: test-results + path: integration/spark/app/build/test-results \ No newline at end of file diff --git a/integration/spark/app/build.gradle b/integration/spark/app/build.gradle index 339d13a5e7..720bdc344e 100644 --- a/integration/spark/app/build.gradle +++ b/integration/spark/app/build.gradle @@ -427,6 +427,9 @@ tasks.register("awsIntegrationTest", Test) { */ systemProperties = System.getProperties().findAll { key, value -> key.toString().startsWith("openlineage") } } + testLogging { + exceptionFormat "full" + } } tasks.register("configurableIntegrationTest", Test) { diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/AwsUtils.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/AwsUtils.java index c662298704..13e276a754 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/AwsUtils.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/AwsUtils.java @@ -11,6 +11,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.Optional; @@ -68,13 +69,63 @@ static String uploadS3TransportJar(S3Client s3Client, String bucket, String pref .max(Comparator.naturalOrder()); } - public void deleteFiles(S3Client s3Client, String bucket, String prefix) {} + @SuppressWarnings("PMD.NullAssignment") + public void deleteFiles(S3Client s3Client, String bucket, String prefix) { + String continuationToken = null; + + do { + // Step 1: List objects with the specified prefix + ListObjectsV2Request.Builder listRequestBuilder = + ListObjectsV2Request.builder() + .bucket(bucket) + .prefix(prefix) + .maxKeys(1000); // Max keys per request + + if (continuationToken != null) { + listRequestBuilder.continuationToken(continuationToken); + } + + ListObjectsV2Response listResponse = s3Client.listObjectsV2(listRequestBuilder.build()); + + List objectsToDelete = new ArrayList<>(); + + for (S3Object s3Object : listResponse.contents()) { + objectsToDelete.add(ObjectIdentifier.builder().key(s3Object.key()).build()); + } + + // Step 2: Delete objects in batches (up to 1000 per request) + if (!objectsToDelete.isEmpty()) { + DeleteObjectsRequest deleteRequest = + DeleteObjectsRequest.builder() + .bucket(bucket) + .delete(Delete.builder().objects(objectsToDelete).build()) + .build(); + + DeleteObjectsResponse deleteResponse = 
s3Client.deleteObjects(deleteRequest); + + List errors = deleteResponse.errors(); + if (!errors.isEmpty()) { + for (S3Error error : errors) { + log.error("Failed to delete [{}]. Error message: [{}]", error.key(), error.message()); + } + } + } + + // Step 3: Prepare for the next iteration if more objects are available + continuationToken = listResponse.isTruncated() ? listResponse.nextContinuationToken() : null; + + } while (continuationToken != null); + } static List fetchEventsEmitted( S3Client s3Client, String bucketName, String location) { - return readAllFilesInPath(s3Client, bucketName, location) - .map(OpenLineageClientUtils::runEventFromJson) - .collect(Collectors.toList()); + log.info("Fetching events from [{}]...", AwsUtils.s3Url(bucketName, location)); + List events = + readAllFilesInPath(s3Client, bucketName, location) + .map(OpenLineageClientUtils::runEventFromJson) + .collect(Collectors.toList()); + log.info("There are [{}] events.", events.size()); + return events; } private static Stream readAllFilesInPath( diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DynamicParameter.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DynamicParameter.java index 14204d29c1..ae49a3090c 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DynamicParameter.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DynamicParameter.java @@ -24,6 +24,18 @@ public enum DynamicParameter { ClusterId("clusterId", ""), PreventS3Cleanup("preventS3Cleanup", "false"), PreventClusterTermination("preventClusterTermination", "false"), + /** + * Determines which port can be used to debug the application. For debugging to work, make sure + * the EC2 subnet has the firewall rule, allowing you to access the master node using this port. + * You have to edit the EC2 security group the cluster is attached to and add the TCP inbound + * rule. Then you can use remote debugging option in your IDE (with this port and the master + * node's IP address) to attach session. If attaching seems to keep forever, it means that the + * firewall rule is not correct. If the server rejects the debugger's connection it means the + * application is not running yet, and you should repeat the attempt or make sure it is still + * running. You should run the cluster beforehand, note the master IP address and have the + * debugging session prepared before you attach the + */ + DebugPort("debugPort", "5005"), // CLUSTER EmrLabel("emrLabel", "emr-7.2.0"), @@ -32,6 +44,10 @@ public enum DynamicParameter { ServiceRole("serviceRole", "EMR_DefaultRole"), MasterInstanceType("masterInstanceType", "m4.large"), SlaveInstanceType("slaveInstanceType", "m4.large"), + Ec2SubnetId("ec2SubnetId"), + /** The optional key pair which can be used to SSH to the cluster. Useful for troubleshooting. 
*/ + SshKeyPairName("sshKeyPairName", ""), + IdleClusterTerminationSeconds("clusterIdleTerminationSeconds", "300"), /** The bucket where the tests keep the dependency jars, scripts, produced events, logs, etc */ BucketName("bucketName"), diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrIntegrationTest.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrIntegrationTest.java index 377db3814c..ea643a6dde 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrIntegrationTest.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrIntegrationTest.java @@ -12,6 +12,7 @@ import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.util.List; +import java.util.Locale; import java.util.Map; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -85,25 +86,34 @@ class EmrIntegrationTest { + ZonedDateTime.now(ZoneOffset.UTC) .format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss")) + "/"; + String clusterId = DynamicParameter.ClusterId.resolve(); + EmrTestEnvironment.EmrTestEnvironmentProperties.NewCluster newCluster = + "".equals(clusterId) + ? EmrTestEnvironment.EmrTestEnvironmentProperties.NewCluster.builder() + .emrLabel(DynamicParameter.EmrLabel.resolve()) + .ec2InstanceProfile(DynamicParameter.Ec2InstanceProfile.resolve()) + .serviceRole(DynamicParameter.ServiceRole.resolve()) + .masterInstanceType(DynamicParameter.MasterInstanceType.resolve()) + .slaveInstanceType(DynamicParameter.SlaveInstanceType.resolve()) + .subnetId(DynamicParameter.Ec2SubnetId.resolve()) + .ec2SshKeyName(DynamicParameter.SshKeyPairName.resolve()) + .idleClusterTerminationSeconds( + Long.parseLong(DynamicParameter.IdleClusterTerminationSeconds.resolve())) + .build() + : null; emrTestParameters = EmrTestEnvironment.EmrTestEnvironmentProperties.builder() .development( EmrTestEnvironment.EmrTestEnvironmentProperties.Development.builder() // We can connect to the existing EMR cluster to speed up testing - .clusterId(DynamicParameter.ClusterId.resolve()) + .clusterId(clusterId) .preventS3Cleanup( Boolean.parseBoolean(DynamicParameter.PreventS3Cleanup.resolve())) .preventClusterTermination( Boolean.parseBoolean(DynamicParameter.PreventClusterTermination.resolve())) + .debugPort(Integer.parseInt(DynamicParameter.DebugPort.resolve())) .build()) - .cluster( - EmrTestEnvironment.EmrTestEnvironmentProperties.NewCluster.builder() - .emrLabel(DynamicParameter.EmrLabel.resolve()) - .ec2InstanceProfile(DynamicParameter.Ec2InstanceProfile.resolve()) - .serviceRole(DynamicParameter.ServiceRole.resolve()) - .masterInstanceType(DynamicParameter.MasterInstanceType.resolve()) - .slaveInstanceType(DynamicParameter.SlaveInstanceType.resolve()) - .build()) + .cluster(newCluster) .bucketName(DynamicParameter.BucketName.resolve()) .keyPrefix(testsPrefix) .build(); @@ -147,4 +157,88 @@ void testBasicScriptHasOutputs() { assertThat("s3://" + emrTestParameters.getBucketName()) .isEqualTo(completeEvent.getOutputs().get(0).getNamespace()); } + + @SuppressWarnings("PMD.JUnitTestContainsTooManyAsserts") + @Test + void testImplicitGlueCatalogIsUsed() { + /* + * The Aws Glue integration in EMR connects to the Glue catalog with the identifier specified by property + * "hive.metastore.glue.catalogid". When it is not provided, we still want to use Glue and receive + * the correct symlinks. In such scenarios the account ID of the current AWS account will be used as catalog ID. 
+ */ + String bucketName = emrTestParameters.getBucketName(); + String outputPrefix = emrTestParameters.getKeyPrefix() + "output"; + String databaseName = "peopleDatabase"; + String glueDatabaseName = databaseName.toLowerCase(Locale.ROOT); + String databaseLocation = outputPrefix + "/" + databaseName; + String inputTableName = "peopleSource"; + String glueInputTableName = inputTableName.toLowerCase(Locale.ROOT); + String outputTableName = "peopleDestination"; + String glueOutputTableName = outputTableName.toLowerCase(Locale.ROOT); + List runEvents = + emrTestEnvironment.runScript( + "glue_symlink_implicit_account_id.py", + Map.of( + "bucketName", bucketName, + "outputPrefix", outputPrefix, + "databaseName", databaseName, + "sourceTableName", inputTableName, + "destinationTableName", outputTableName)); + + assertThat(runEvents).isNotEmpty(); + + OpenLineage.RunEvent inOutEvent = + runEvents.stream() + .filter(runEvent -> runEvent.getEventType() == OpenLineage.RunEvent.EventType.COMPLETE) + .filter(runEvent -> !runEvent.getOutputs().isEmpty()) + .filter(runEvent -> !runEvent.getInputs().isEmpty()) + .filter( + runEvent -> + // Has output symlink + { + OpenLineage.SymlinksDatasetFacet symlinks = + runEvent.getOutputs().get(0).getFacets().getSymlinks(); + return symlinks != null && !symlinks.getIdentifiers().isEmpty(); + }) + .filter( + runEvent -> + // Has input symlink + { + OpenLineage.SymlinksDatasetFacet symlinks = + runEvent.getInputs().get(0).getFacets().getSymlinks(); + return symlinks != null && !symlinks.getIdentifiers().isEmpty(); + }) + .findFirst() + .get(); + + OpenLineage.InputDataset inputDataset = inOutEvent.getInputs().get(0); + OpenLineage.SymlinksDatasetFacetIdentifiers inputDatasetSymlink = + inputDataset.getFacets().getSymlinks().getIdentifiers().get(0); + + assertThat(inputDataset.getNamespace()).isEqualTo("s3://" + bucketName); + /* + Note: + The dataset name (table files' location) is formed by concatenating the database location and the table name. + - The database location is case-sensitive. + - Both the table name and database name are always converted to lowercase in AWS Glue. + + As a result, when combining them, the dataset name might contain a mix of uppercase (from the location) + and lowercase characters (from the table name). 
+ */ + assertThat(inputDataset.getName()).isEqualTo(databaseLocation + "/" + glueInputTableName); + assertThat(inputDatasetSymlink.getNamespace()).startsWith("arn:aws:glue:"); + assertThat(inputDatasetSymlink.getName()) + .isEqualTo("table/" + glueDatabaseName + "/" + glueInputTableName); + assertThat(inputDatasetSymlink.getType()).isEqualTo("TABLE"); + + OpenLineage.OutputDataset outputDataset = inOutEvent.getOutputs().get(0); + OpenLineage.SymlinksDatasetFacetIdentifiers outputDatasetSymlink = + outputDataset.getFacets().getSymlinks().getIdentifiers().get(0); + assertThat(outputDataset.getNamespace()).isEqualTo("s3://" + bucketName); + assertThat(outputDataset.getName()).isEqualTo(databaseLocation + "/" + glueOutputTableName); + assertThat(outputDatasetSymlink.getNamespace()).startsWith("arn:aws:glue:"); + assertThat(outputDatasetSymlink.getName()) + .isEqualTo("table/" + glueDatabaseName + "/" + glueOutputTableName); + assertThat(outputDatasetSymlink.getType()).isEqualTo("TABLE"); + } } diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrTestEnvironment.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrTestEnvironment.java index 83212e0d70..356e74a9e4 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrTestEnvironment.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrTestEnvironment.java @@ -8,6 +8,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.openlineage.client.OpenLineage; +import java.time.Duration; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -18,11 +19,18 @@ import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; import software.amazon.awssdk.core.internal.waiters.ResponseOrException; +import software.amazon.awssdk.core.waiters.WaiterOverrideConfiguration; +import software.amazon.awssdk.retries.api.BackoffStrategy; import software.amazon.awssdk.services.emr.EmrClient; import software.amazon.awssdk.services.emr.model.*; import software.amazon.awssdk.services.emr.waiters.EmrWaiter; import software.amazon.awssdk.services.s3.S3Client; +/** + * The class that manages all the resources required to perform EMR integration tests. It provides + * convenient methods for running scripts and retrieving the events. It creates the cluster, + * terminates it, cleans up the S3 resources. + */ @Slf4j public class EmrTestEnvironment implements AutoCloseable { private final EmrClient client = EmrClient.builder().build(); @@ -40,7 +48,8 @@ public class EmrTestEnvironment implements AutoCloseable { @Getter static class EmrTestEnvironmentProperties { @NonNull private final Development development; - @NonNull private final NewCluster cluster; + private final NewCluster cluster; + // The name of the bucket used for jars, scripts and logs @NonNull private final String bucketName; // The unique prefix used to run the tests. 
It is the location where the files with jars, // scripts, events and logs will be stored @@ -62,7 +71,7 @@ static class Development { // cluster // with test but want to keep it for future tests private final boolean preventClusterTermination; - // The name of the bucket used for jars, scripts and logs + private final int debugPort; } @Builder @@ -73,6 +82,9 @@ static class NewCluster { @NonNull private final String ec2InstanceProfile; @NonNull private final String masterInstanceType; @NonNull private final String slaveInstanceType; + @NonNull private final String subnetId; + @NonNull private final String ec2SshKeyName; + private final long idleClusterTerminationSeconds; } public String getJobsPrefix() { @@ -190,6 +202,9 @@ private void submitJob(String scriptName, String bucketName, String scriptS3Loca "spark-submit", "--jars", jars, + "--driver-java-options", + "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:" + + properties.getDevelopment().getDebugPort(), "--conf", "spark.extraListeners=io.openlineage.spark.agent.OpenLineageSparkListener", "--conf", @@ -204,9 +219,9 @@ private void submitJob(String scriptName, String bucketName, String scriptS3Loca .build()) .build()); String stepId = addJobFlowStepsResponse.stepIds().get(0); - log.info("PySpark step submitted with ID [{}]. Waiting for completion.", stepId); + log.info("PySpark step submitted with ID [{}]. Waiting for completion...", stepId); waitForStepToComplete(stepId); - log.info("PySpark step [{}] completed. Fetching events.", stepId); + log.info("PySpark step [{}] completed.", stepId); } void waitForStepToComplete(String stepId) { @@ -214,7 +229,15 @@ void waitForStepToComplete(String stepId) { DescribeStepRequest.builder().clusterId(clusterId).stepId(stepId).build(); ResponseOrException matched = - waiter.waitUntilStepComplete(describeStepRequest).matched(); + waiter + .waitUntilStepComplete( + describeStepRequest, + WaiterOverrideConfiguration.builder() + .backoffStrategyV2(BackoffStrategy.fixedDelay(Duration.ofSeconds(5))) + .maxAttempts(120) + .waitTimeout(Duration.ofMinutes(10)) + .build()) + .matched(); matched .response() @@ -264,6 +287,7 @@ public void close() { private String createNewCluster(EmrTestEnvironmentProperties properties) { EmrTestEnvironmentProperties.NewCluster cluster = properties.getCluster(); + RunJobFlowRequest request = RunJobFlowRequest.builder() .name("OpenLineageIntegrationTest") @@ -280,12 +304,18 @@ private String createNewCluster(EmrTestEnvironmentProperties properties) { "hive.execution.engine", "spark")) .build()) + .autoTerminationPolicy( + AutoTerminationPolicy.builder() + .idleTimeout(cluster.getIdleClusterTerminationSeconds()) + .build()) .instances( JobFlowInstancesConfig.builder() .instanceCount(1) .keepJobFlowAliveWhenNoSteps(true) // Cluster doesn't shut down immediately .masterInstanceType(cluster.getMasterInstanceType()) .slaveInstanceType(cluster.getSlaveInstanceType()) + .ec2SubnetId(cluster.getSubnetId()) + .ec2KeyName(cluster.ec2SshKeyName) .build()) .jobFlowRole(cluster.getEc2InstanceProfile()) .serviceRole(cluster.getServiceRole()) @@ -296,7 +326,7 @@ private String createNewCluster(EmrTestEnvironmentProperties properties) { } private void waitForClusterReady(String clusterId) { - log.info("Waiting for cluster [{}] ready", clusterId); + log.info("Waiting for cluster [{}] ready...", clusterId); // The default waiting strategy is to poll the cluster for 30 minutes (max 60 times) with around // 30 seconds between each attempt until the cluster says it is ready. 
ResponseOrException waiterResponse = @@ -304,12 +334,7 @@ private void waitForClusterReady(String clusterId) { .waitUntilClusterRunning(DescribeClusterRequest.builder().clusterId(clusterId).build()) .matched(); - waiterResponse - .response() - .ifPresent( - response -> { - log.info("Cluster [{}] is ready", clusterId); - }); + waiterResponse.response().ifPresent(response -> log.info("Cluster [{}] is ready", clusterId)); waiterResponse .exception() @@ -331,10 +356,7 @@ private void waitForClusterTerminated(String clusterId) { waiterResponse .response() - .ifPresent( - response -> { - log.info("Cluster [{}] has been terminated", clusterId); - }); + .ifPresent(response -> log.info("Cluster [{}] has been terminated", clusterId)); waiterResponse .exception() diff --git a/integration/spark/app/src/test/resources/emr_test_jobs/glue_symlink_implicit_account_id.py b/integration/spark/app/src/test/resources/emr_test_jobs/glue_symlink_implicit_account_id.py new file mode 100644 index 0000000000..4e24f28937 --- /dev/null +++ b/integration/spark/app/src/test/resources/emr_test_jobs/glue_symlink_implicit_account_id.py @@ -0,0 +1,54 @@ +# Copyright 2018-2024 contributors to the OpenLineage project +# SPDX-License-Identifier: Apache-2.0 + +from pyspark.sql import SparkSession + +from time import sleep + +# Parameter provisioned by the templating system +BUCKET_NAME = "{{bucketName}}" +OUTPUT_PREFIX = "{{outputPrefix}}" +DATABASE_NAME = "{{databaseName}}" +SOURCE_TABLE_NAME = "{{sourceTableName}}" +DESTINATION_TABLE_NAME = "{{destinationTableName}}" + +spark = ( + SparkSession.builder.appName("Glue symlink implicit account id") + .config( + "hive.metastore.client.factory.class", + "com.amazonaws.glue.catalog.metastore.AWSGlueDataCatalogHiveClientFactory", + ) + .config("spark.sql.catalogImplementation", "hive") + .getOrCreate() +) + +spark.sql(f"DROP DATABASE IF EXISTS {DATABASE_NAME} CASCADE") + +spark.sql(f"CREATE DATABASE {DATABASE_NAME} LOCATION 's3://{BUCKET_NAME}/{OUTPUT_PREFIX}/{DATABASE_NAME}/'") + +source_table = f"{DATABASE_NAME}.{SOURCE_TABLE_NAME}" +people_df = spark.createDataFrame( + [ + (1, "John", "Smith", 49), + (2, "Mary", "Brown", 12), + (3, "Tom", "White", 51), + (4, "Bruce", "Willis", 18), + (5, "Jason", "Mane", 22), + ], + ["id", "first_name", "last_name", "age"], +) +people_df.writeTo(source_table).create() + +destination_table = f"{DATABASE_NAME}.{DESTINATION_TABLE_NAME}" +spark.sql(f"""CREATE TABLE {destination_table} AS (SELECT * FROM {source_table})""") + +( + spark.sql(f"SELECT * FROM {source_table}") + .write.mode("overwrite") + .format("parquet") + .saveAsTable(f"{DATABASE_NAME}.{DESTINATION_TABLE_NAME}") +) + +spark.sql(f"DROP DATABASE IF EXISTS {DATABASE_NAME} CASCADE") + +sleep(3) diff --git a/integration/spark/build.gradle b/integration/spark/build.gradle index 6793c3eae1..5ea100bdb2 100644 --- a/integration/spark/build.gradle +++ b/integration/spark/build.gradle @@ -182,6 +182,12 @@ shadowJar { relocate "com.fasterxml.jackson", "io.openlineage.spark.shaded.com.fasterxml.jackson" relocate "org.LatencyUtils", "io.openlineage.spark.shaded.org.latencyutils" relocate "org.HdrHistogram", "io.openlineage.spark.shaded.org.hdrhistogram" + + // AWS SDK dependencies + relocate "software.amazon", "io.openlineage.spark.shaded.software.amazon" + relocate "org.reactivestreams", "io.openlineage.spark.shaded.org.reactivestreams" + relocate "io.netty", "io.openlineage.spark.shaded.io.netty" + manifest { attributes( "Created-By": "Gradle ${gradle.gradleVersion}", diff --git 
a/integration/spark/shared/build.gradle b/integration/spark/shared/build.gradle index 3816c599a3..a760885c4c 100644 --- a/integration/spark/shared/build.gradle +++ b/integration/spark/shared/build.gradle @@ -37,7 +37,7 @@ ext { postgresqlVersion = "42.7.4" sqlLiteVersion = "3.46.1.3" testcontainersVersion = "1.19.3" - micrometerVersion = '1.13.2' + awsSdkVersion = '2.28.11' sparkVersion = project.findProperty("shared.spark.version") sparkSeries = sparkVersion.substring(0, 3) @@ -64,6 +64,16 @@ dependencies { exclude(group: "com.fasterxml.jackson.module") } + implementation(platform("software.amazon.awssdk:bom:${awsSdkVersion}")) + implementation("software.amazon.awssdk:sts") + implementation("software.amazon.awssdk:url-connection-client") + scala212Implementation(platform("software.amazon.awssdk:bom:${awsSdkVersion}")) + scala212Implementation("software.amazon.awssdk:sts") + scala212Implementation("software.amazon.awssdk:url-connection-client") + scala213Implementation(platform("software.amazon.awssdk:bom:${awsSdkVersion}")) + scala213Implementation("software.amazon.awssdk:sts") + scala213Implementation("software.amazon.awssdk:url-connection-client") + // TODO: Convert these to testImplementation testFixturesApi(platform("org.junit:junit-bom:${junit5Version}")) testFixturesApi("org.junit.jupiter:junit-jupiter") diff --git a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/AwsAccountIdFetcher.java b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/AwsAccountIdFetcher.java new file mode 100644 index 0000000000..cbce9fd121 --- /dev/null +++ b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/AwsAccountIdFetcher.java @@ -0,0 +1,40 @@ +/* +/* Copyright 2018-2024 contributors to the OpenLineage project +/* SPDX-License-Identifier: Apache-2.0 +*/ + +package io.openlineage.spark.agent.util; + +import lombok.experimental.UtilityClass; +import lombok.extern.slf4j.Slf4j; +import software.amazon.awssdk.http.urlconnection.UrlConnectionHttpClient; +import software.amazon.awssdk.services.sts.StsClient; +import software.amazon.awssdk.services.sts.model.GetCallerIdentityRequest; +import software.amazon.awssdk.services.sts.model.GetCallerIdentityResponse; + +/** + * Obtains and caches the account ID using the AWS SDK. The returned value is cached between + * invocations. This could potentially cause problems when the application is using custom + * credentials provider, but we don't support dynamic credentials providers anyway. 
+ */ +@Slf4j +@UtilityClass +public class AwsAccountIdFetcher { + private static String accountId; + + public static String getAccountId() { + if (accountId == null) { + log.info("Building STS client."); + try (StsClient stsClient = + StsClient.builder().httpClient(UrlConnectionHttpClient.builder().build()).build()) { + GetCallerIdentityRequest request = GetCallerIdentityRequest.builder().build(); + GetCallerIdentityResponse response = stsClient.getCallerIdentity(request); + accountId = response.account(); + log.info("Retrieved account ID [{}].", accountId); + } + } else { + log.debug("Using cached account ID [{}].", accountId); + } + return accountId; + } +} diff --git a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/AwsUtils.java b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/AwsUtils.java new file mode 100644 index 0000000000..57a5d0710c --- /dev/null +++ b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/AwsUtils.java @@ -0,0 +1,119 @@ +/* +/* Copyright 2018-2024 contributors to the OpenLineage project +/* SPDX-License-Identifier: Apache-2.0 +*/ + +package io.openlineage.spark.agent.util; + +import java.util.Optional; +import lombok.SneakyThrows; +import lombok.experimental.UtilityClass; +import lombok.extern.slf4j.Slf4j; +import org.apache.hadoop.conf.Configuration; +import org.apache.spark.SparkConf; +import org.jetbrains.annotations.NotNull; + +@Slf4j +@UtilityClass +public class AwsUtils { + + public static final String HIVE_METASTORE_CLIENT_FACTORY_CLASS = + "hive.metastore.client.factory.class"; + public static final String AWS_GLUE_HIVE_FACTORY_CLASS = + "com.amazonaws.glue.catalog.metastore.AWSGlueDataCatalogHiveClientFactory"; + private static final String HIVE_METASTORE_GLUE_CATALOG_ID_KEY = "hive.metastore.glue.catalogid"; + + @SneakyThrows + public static Optional getGlueArn(SparkConf sparkConf, Configuration hadoopConf) { + if (isHiveUsingGlue(sparkConf, hadoopConf)) { + return awsRegion() + .flatMap( + region -> + getGlueCatalogId(sparkConf, hadoopConf) + .map(glueCatalogId -> "arn:aws:glue:" + region + ":" + glueCatalogId)); + } else { + return Optional.empty(); + } + } + + /** + * Obtains the Glue catalog ID. + * + *

There is no single place where Glue catalog ID is located. It depends on the environment + * where the application is running and optional, extra configuration. + */ + private static @NotNull Optional getGlueCatalogId( + SparkConf sparkConf, Configuration hadoopConf) { + /* + The ID of the Glue catalog can be specified explicitly. If it is not, then the account ID of the current account + is used. + + To specify the catalog ID directly, the property "hive.metastore.glue.catalogid" is used. This method is useful + in scenarios when the application should use the organization's Glue catalog instead of the current account catalog. + + When the catalog ID is not specified, there are different ways to determine the account ID. In environments like + Glue jobs, it is conveniently available as a Spark property. In other environments (like EMR), we have to use + AWS SDK to determine the current account ID. + */ + + Optional explicitGlueCatalogId = getExplicitGlueCatalogId(sparkConf, hadoopConf); + if (explicitGlueCatalogId.isPresent()) { + return explicitGlueCatalogId; + } + + Optional glueJobAccountId = + SparkConfUtils.findSparkConfigKey(sparkConf, "spark.glue.accountId"); + if (glueJobAccountId.isPresent()) { + log.debug("Using [spark.glue.account] property [{}] as catalog ID.", glueJobAccountId.get()); + return glueJobAccountId; + } else { + log.debug("Fetching current account ID to use as the catalog ID."); + return Optional.of(AwsAccountIdFetcher.getAccountId()); + } + } + + /** Obtains the Glue catalog id when it is specified explicitly. */ + private static Optional getExplicitGlueCatalogId( + SparkConf sparkConf, Configuration hadoopConf) { + /* + For environments like EMR the catalog ID is specified in Spark properties. + For other environments like Athena it is specified as Hadoop properties. 
+ + Note for Athena: Even though the catalog ID is specified with prefix "spark.hadoop", it is removed by SparkHadoopUtil + */ + Optional sparkPropertyCatalogId = + SparkConfUtils.findSparkConfigKey(sparkConf, HIVE_METASTORE_GLUE_CATALOG_ID_KEY); + if (sparkPropertyCatalogId.isPresent()) { + log.debug( + "There is an explicit catalog ID [{}} passed as [{}] Spark property.", + sparkPropertyCatalogId.get(), + HIVE_METASTORE_GLUE_CATALOG_ID_KEY); + return sparkPropertyCatalogId; + } + Optional hadoopPropertyCatalogId = + SparkConfUtils.findHadoopConfigKey(hadoopConf, HIVE_METASTORE_GLUE_CATALOG_ID_KEY); + hadoopPropertyCatalogId.ifPresent( + s -> + log.debug( + "There is an explicit catalog ID [{}} passed as [{}] Hadoop property.", + s, + HIVE_METASTORE_GLUE_CATALOG_ID_KEY)); + return hadoopPropertyCatalogId; + } + + private static @NotNull Optional awsRegion() { + return Optional.ofNullable(System.getenv("AWS_DEFAULT_REGION")) + .filter(s -> !s.isEmpty()) + .map(Optional::of) + .orElseGet(() -> Optional.ofNullable(System.getenv("AWS_REGION"))); + } + + private static boolean isHiveUsingGlue(SparkConf sparkConf, Configuration hadoopConf) { + Optional hadoopFactoryClass = + SparkConfUtils.findHadoopConfigKey(hadoopConf, HIVE_METASTORE_CLIENT_FACTORY_CLASS); + Optional sparkFactoryClass = + SparkConfUtils.findSparkConfigKey(sparkConf, HIVE_METASTORE_CLIENT_FACTORY_CLASS); + return AWS_GLUE_HIVE_FACTORY_CLASS.equals( + hadoopFactoryClass.orElse(sparkFactoryClass.orElse(null))); + } +} diff --git a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/PathUtils.java b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/PathUtils.java index 2e7e26cad4..0d8fbc8b84 100644 --- a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/PathUtils.java +++ b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/PathUtils.java @@ -9,7 +9,6 @@ import io.openlineage.client.utils.filesystem.FilesystemDatasetUtils; import java.net.URI; import java.util.Optional; -import java.util.stream.Stream; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.hadoop.conf.Configuration; @@ -25,7 +24,6 @@ @SuppressWarnings("PMD.AvoidLiteralsInIfCondition") public class PathUtils { private static final String DEFAULT_DB = "default"; - private static final String HIVE_METASTORE_GLUE_CATALOG_ID_KEY = "hive.metastore.glue.catalogid"; public static final String GLUE_TABLE_PREFIX = "table/"; public static DatasetIdentifier fromPath(Path path) { @@ -60,7 +58,7 @@ public static DatasetIdentifier fromCatalogTable( Configuration hadoopConf = sparkContext.hadoopConfiguration(); Optional metastoreUri = getMetastoreUri(sparkContext); - Optional glueArn = getGlueArn(sparkConf, hadoopConf); + Optional glueArn = AwsUtils.getGlueArn(sparkConf, hadoopConf); if (glueArn.isPresent()) { // Even if glue catalog is used, it will have a hive metastore URI @@ -153,54 +151,6 @@ private static Optional getMetastoreUri(SparkContext context) { return SparkConfUtils.getMetastoreUri(context); } - @SneakyThrows - public static Optional getGlueArn(SparkConf sparkConf, Configuration hadoopConf) { - Optional clientFactory = - SparkConfUtils.findHadoopConfigKey(hadoopConf, "hive.metastore.client.factory.class"); - // Fetch from spark config if set. - clientFactory = - clientFactory.isPresent() - ? 
clientFactory - : SparkConfUtils.findSparkConfigKey(sparkConf, "hive.metastore.client.factory.class"); - if (!clientFactory.isPresent() - || !"com.amazonaws.glue.catalog.metastore.AWSGlueDataCatalogHiveClientFactory" - .equals(clientFactory.get())) { - return Optional.empty(); - } - - Optional region = - Optional.ofNullable(System.getenv("AWS_DEFAULT_REGION")) - .filter(s -> !s.isEmpty()) - .map(Optional::of) - .orElseGet(() -> Optional.ofNullable(System.getenv("AWS_REGION"))); - - Optional accountId = - SparkConfUtils.findSparkConfigKey(sparkConf, "spark.glue.accountId"); - // For AWS Glue catalog in EMR spark - // Glue catalog with EMR guide: - // https://docs.aws.amazon.com/emr/latest/ReleaseGuide/emr-spark-glue.html - Optional glueCatalogIdForEMR = - SparkConfUtils.findSparkConfigKey(sparkConf, HIVE_METASTORE_GLUE_CATALOG_ID_KEY); - // For AWS Glue access in Athena for Spark - // Guide: https://docs.aws.amazon.com/athena/latest/ug/spark-notebooks-cross-account-glue.html - // spark config "spark.hadoop.hive.metastore.glue.catalogid" is copied to hadoop - // "hive.metastore.glue.catalogid" by SparkHadoopUtil (removing the prefix spark.hadoop) - Optional glueCatalogIdForAthena = - SparkConfUtils.findHadoopConfigKey(hadoopConf, HIVE_METASTORE_GLUE_CATALOG_ID_KEY); - - Optional glueCatalogId = - Stream.of(glueCatalogIdForEMR, glueCatalogIdForAthena, accountId) - .filter(Optional::isPresent) - .findFirst() - .orElse(Optional.empty()); - - if (!region.isPresent() || !glueCatalogId.isPresent()) { - return Optional.empty(); - } - - return Optional.of("arn:aws:glue:" + region.get() + ":" + glueCatalogId.get()); - } - /** Get DatasetIdentifier name in format database.table or table */ private static String nameFromTableIdentifier(TableIdentifier identifier) { return nameFromTableIdentifier(identifier, "."); diff --git a/integration/spark/spark3/src/main/java/io/openlineage/spark3/agent/lifecycle/plan/catalog/IcebergHandler.java b/integration/spark/spark3/src/main/java/io/openlineage/spark3/agent/lifecycle/plan/catalog/IcebergHandler.java index 250535a04f..04f7b46849 100644 --- a/integration/spark/spark3/src/main/java/io/openlineage/spark3/agent/lifecycle/plan/catalog/IcebergHandler.java +++ b/integration/spark/spark3/src/main/java/io/openlineage/spark3/agent/lifecycle/plan/catalog/IcebergHandler.java @@ -11,6 +11,7 @@ import io.openlineage.client.utils.DatasetIdentifier; import io.openlineage.client.utils.DatasetIdentifier.SymlinkType; import io.openlineage.client.utils.filesystem.FilesystemDatasetUtils; +import io.openlineage.spark.agent.util.AwsUtils; import io.openlineage.spark.agent.util.PathUtils; import io.openlineage.spark.agent.util.ScalaConversionUtils; import io.openlineage.spark.agent.util.SparkConfUtils; @@ -222,7 +223,7 @@ private DatasetIdentifier getNessieIdentifier(@Nullable String confUri, String t private DatasetIdentifier getGlueIdentifier(String table, SparkSession sparkSession) { SparkContext sparkContext = sparkSession.sparkContext(); String arn = - PathUtils.getGlueArn(sparkContext.getConf(), sparkContext.hadoopConfiguration()).get(); + AwsUtils.getGlueArn(sparkContext.getConf(), sparkContext.hadoopConfiguration()).get(); return new DatasetIdentifier(GLUE_TABLE_PREFIX + table.replace(".", "/"), arn); } From bb9b902457ae944d54478d44e0720d03a1bedb66 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 10:21:53 +0200 Subject: [PATCH 20/89] Bump the client-java group across 1 directory with 7 updates (#3174) 
Bumps the client-java group with 7 updates in the /client/java directory: | Package | From | To | | --- | --- | --- | | [io.micrometer:micrometer-core](https://github.com/micrometer-metrics/micrometer) | `1.13.5` | `1.13.6` | | [io.micrometer:micrometer-registry-statsd](https://github.com/micrometer-metrics/micrometer) | `1.13.5` | `1.13.6` | | [com.amazonaws:amazon-kinesis-producer](https://github.com/awslabs/amazon-kinesis-producer) | `0.15.11` | `0.15.12` | | [com.google.cloud:google-cloud-datalineage](https://github.com/googleapis/google-cloud-java) | `0.43.0` | `0.44.0` | | [com.google.cloud:google-cloud-storage](https://github.com/googleapis/java-storage) | `2.43.1` | `2.43.2` | | [com.google.cloud:google-cloud-nio](https://github.com/googleapis/java-storage-nio) | `0.127.24` | `0.127.25` | | software.amazon.awssdk:bom | `2.28.16` | `2.28.26` | Updates `io.micrometer:micrometer-core` from 1.13.5 to 1.13.6 - [Release notes](https://github.com/micrometer-metrics/micrometer/releases) - [Commits](https://github.com/micrometer-metrics/micrometer/compare/v1.13.5...v1.13.6) Updates `io.micrometer:micrometer-registry-statsd` from 1.13.5 to 1.13.6 - [Release notes](https://github.com/micrometer-metrics/micrometer/releases) - [Commits](https://github.com/micrometer-metrics/micrometer/compare/v1.13.5...v1.13.6) Updates `com.amazonaws:amazon-kinesis-producer` from 0.15.11 to 0.15.12 - [Release notes](https://github.com/awslabs/amazon-kinesis-producer/releases) - [Changelog](https://github.com/awslabs/amazon-kinesis-producer/blob/master/CHANGELOG.md) - [Commits](https://github.com/awslabs/amazon-kinesis-producer/compare/v0.15.11...v0.15.12) Updates `io.micrometer:micrometer-registry-statsd` from 1.13.5 to 1.13.6 - [Release notes](https://github.com/micrometer-metrics/micrometer/releases) - [Commits](https://github.com/micrometer-metrics/micrometer/compare/v1.13.5...v1.13.6) Updates `com.google.cloud:google-cloud-datalineage` from 0.43.0 to 0.44.0 - [Release notes](https://github.com/googleapis/google-cloud-java/releases) - [Changelog](https://github.com/googleapis/google-cloud-java/blob/v0.44.0/CHANGES.md) - [Commits](https://github.com/googleapis/google-cloud-java/compare/v0.43.0...v0.44.0) Updates `com.google.cloud:google-cloud-storage` from 2.43.1 to 2.43.2 - [Release notes](https://github.com/googleapis/java-storage/releases) - [Changelog](https://github.com/googleapis/java-storage/blob/main/CHANGELOG.md) - [Commits](https://github.com/googleapis/java-storage/compare/v2.43.1...v2.43.2) Updates `com.google.cloud:google-cloud-nio` from 0.127.24 to 0.127.25 - [Release notes](https://github.com/googleapis/java-storage-nio/releases) - [Changelog](https://github.com/googleapis/java-storage-nio/blob/main/CHANGELOG.md) - [Commits](https://github.com/googleapis/java-storage-nio/compare/v0.127.24...v0.127.25) Updates `software.amazon.awssdk:bom` from 2.28.16 to 2.28.26 --- updated-dependencies: - dependency-name: io.micrometer:micrometer-core dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: io.micrometer:micrometer-registry-statsd dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: com.amazonaws:amazon-kinesis-producer dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: io.micrometer:micrometer-registry-statsd dependency-type: direct:production update-type: version-update:semver-patch 
dependency-group: client-java - dependency-name: com.google.cloud:google-cloud-datalineage dependency-type: direct:production update-type: version-update:semver-minor dependency-group: client-java - dependency-name: com.google.cloud:google-cloud-storage dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: com.google.cloud:google-cloud-nio dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: software.amazon.awssdk:bom dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- client/java/build.gradle | 4 ++-- client/java/transports-gcplineage/build.gradle | 2 +- client/java/transports-gcs/build.gradle | 4 ++-- client/java/transports-s3/build.gradle | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/client/java/build.gradle b/client/java/build.gradle index 9c44c0dd11..777be11e79 100644 --- a/client/java/build.gradle +++ b/client/java/build.gradle @@ -51,7 +51,7 @@ ext { junit5Version = '5.11.2' lombokVersion = '1.18.34' mockitoVersion = '5.2.0' - micrometerVersion = '1.13.5' + micrometerVersion = '1.13.6' isReleaseVersion = !version.endsWith('SNAPSHOT') guavaVersion = '33.3.1-jre' } @@ -74,7 +74,7 @@ dependencies { compileOnly 'com.google.code.findbugs:jsr305:3.0.2' compileOnly 'org.apache.kafka:kafka-clients:3.8.0' - compileOnly 'com.amazonaws:amazon-kinesis-producer:0.15.11' + compileOnly 'com.amazonaws:amazon-kinesis-producer:0.15.12' compileOnly "org.projectlombok:lombok:${lombokVersion}" compileOnly "io.micrometer:micrometer-registry-statsd:${micrometerVersion}" annotationProcessor "org.projectlombok:lombok:${lombokVersion}" diff --git a/client/java/transports-gcplineage/build.gradle b/client/java/transports-gcplineage/build.gradle index 58b834b572..3c975e0778 100644 --- a/client/java/transports-gcplineage/build.gradle +++ b/client/java/transports-gcplineage/build.gradle @@ -24,7 +24,7 @@ ext { dependencies { implementation("com.google.cloud.datalineage:producerclient-java8:1.0.0") - implementation("com.google.cloud:google-cloud-datalineage:0.43.0") + implementation("com.google.cloud:google-cloud-datalineage:0.44.0") } apply from: '../transports.build.gradle' diff --git a/client/java/transports-gcs/build.gradle b/client/java/transports-gcs/build.gradle index 78e3db7470..6556e4a13a 100644 --- a/client/java/transports-gcs/build.gradle +++ b/client/java/transports-gcs/build.gradle @@ -23,9 +23,9 @@ ext { } dependencies { - implementation('com.google.cloud:google-cloud-storage:2.43.1') + implementation('com.google.cloud:google-cloud-storage:2.43.2') - testImplementation('com.google.cloud:google-cloud-nio:0.127.24') + testImplementation('com.google.cloud:google-cloud-nio:0.127.25') } shadowJar { diff --git a/client/java/transports-s3/build.gradle b/client/java/transports-s3/build.gradle index c64f24fb50..ada4642003 100644 --- a/client/java/transports-s3/build.gradle +++ b/client/java/transports-s3/build.gradle @@ -37,7 +37,7 @@ sourceSets { dependencies { compileOnly("com.google.code.findbugs:jsr305:3.0.2") - implementation(platform("software.amazon.awssdk:bom:2.28.16")) + implementation(platform("software.amazon.awssdk:bom:2.28.26")) implementation("software.amazon.awssdk:auth") implementation("software.amazon.awssdk:s3") 
implementation("software.amazon.awssdk:url-connection-client") From 96c8ff66a2ec5663b6b3380e67e042890f0a79fa Mon Sep 17 00:00:00 2001 From: Jakub Dardzinski Date: Mon, 21 Oct 2024 17:29:03 +0200 Subject: [PATCH 21/89] Fix subquery column lineage. (#3170) Signed-off-by: Jakub Dardzinski --- integration/sql/impl/src/context/mod.rs | 32 +++++++-- integration/sql/impl/src/visitor.rs | 5 +- .../table_lineage/test_alias_resolving.rs | 72 +++++++++++++++++++ 3 files changed, 99 insertions(+), 10 deletions(-) diff --git a/integration/sql/impl/src/context/mod.rs b/integration/sql/impl/src/context/mod.rs index 6bedef6256..d1d5101683 100644 --- a/integration/sql/impl/src/context/mod.rs +++ b/integration/sql/impl/src/context/mod.rs @@ -352,8 +352,6 @@ impl<'a> Context<'a> { if !traversed.is_empty() { expanded.extend(traversed.iter().cloned()); - } else { - expanded.insert(ancestor.clone()); } } result.insert(col.clone(), expanded); @@ -367,19 +365,41 @@ impl<'a> Context<'a> { fn expand_ancestors(ancestor: ColumnMeta, old: &ContextFrame) -> Vec { let mut result = Vec::new(); let mut stack = Vec::new(); + let mut max_iter = 40; stack.push(ancestor.clone()); while let Some(current) = stack.pop() { - let column_ancestors = old.column_ancestry.get(¤t); + max_iter -= 1; + if max_iter <= 0 { + break; + } + + let column_ancestors = old + .column_ancestry + .get(¤t) + // try to find by name in case of column without origin + // this can happen when selecting from subquery without alias + .or_else(|| { + old.column_ancestry.iter().find_map(|(col, anc)| { + if col.name == current.name && current.origin.is_none() { + Some(anc) + } else { + None + } + }) + }); if column_ancestors.is_none() { result.push(current.clone()); continue; } if let Some(ancestors) = column_ancestors { - for ancestor in ancestors { - stack.push(ancestor.clone()); + for anc in ancestors { + // avoid infinite loop + if anc != ¤t { + stack.push(anc.clone()); + } } } } @@ -434,8 +454,6 @@ impl<'a> Context<'a> { if !removed_circular_deps.is_empty() { frame.column_ancestry.extend(old_ancestry); - } else { - frame.column_ancestry.extend(removed_circular_deps); } frame.dependencies.extend(old.dependencies); diff --git a/integration/sql/impl/src/visitor.rs b/integration/sql/impl/src/visitor.rs index 42dbaeb86a..1f04e4ce3e 100644 --- a/integration/sql/impl/src/visitor.rs +++ b/integration/sql/impl/src/visitor.rs @@ -555,9 +555,8 @@ impl Visit for Query { context.unset_frame_to_main_body(); } - match &self.with { - Some(with) => with.visit(context)?, - None => (), + if let Some(with) = &self.with { + with.visit(context)? 
} let with_frame = context.pop_frame().unwrap(); diff --git a/integration/sql/impl/tests/table_lineage/test_alias_resolving.rs b/integration/sql/impl/tests/table_lineage/test_alias_resolving.rs index 29634a6145..80ed464596 100644 --- a/integration/sql/impl/tests/table_lineage/test_alias_resolving.rs +++ b/integration/sql/impl/tests/table_lineage/test_alias_resolving.rs @@ -56,6 +56,78 @@ fn table_reference_with_simple_q_ctes() { ) } +#[test] +fn table_complex() { + let output_1 = test_sql( + " + SELECT COL1 + FROM + ( + SELECT COL1 + FROM + ( + SELECT + COL2 + AS COL1 + FROM TAB1 + ) INNER_QUERY + );", + ) + .unwrap(); + + let output_2 = test_sql( + " + SELECT COL1 + FROM + ( + SELECT COL1 + FROM + ( + SELECT + COL2 + AS COL1 + FROM TAB1 + ) + );", + ) + .unwrap(); + + let output_3 = test_sql( + " + SELECT COL1 + FROM + ( + SELECT COL1 + FROM + ( + SELECT + COL2 + AS COL1 + FROM TAB1 + ) INNER_QUERY + ) OUTER_QUERY;", + ) + .unwrap(); + + // assert all outputs are the same + assert_eq!(output_1, output_2); + assert_eq!(output_2, output_3); + + assert_eq!( + output_1.column_lineage, + vec![ColumnLineage { + descendant: ColumnMeta { + origin: None, + name: "COL1".to_string() + }, + lineage: vec![ColumnMeta { + origin: Some(table("TAB1")), + name: "COL2".to_string() + },] + }] + ); +} + #[test] fn table_reference_with_passersby_ctes() { let query_string = " From c1bd0ca8a9539e7d140e4211e1eca09843fa4a79 Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Wed, 23 Oct 2024 00:00:30 +0200 Subject: [PATCH 22/89] feature: Improve Databricks integration tests (#3176) * The tests use updated Databricks SDK * The tests use DynamicParameters like EMR integration tests * The DynamicParameter enum have been renamed to EMRDynamicParameter and the name has been used for DynamicParameter interface * The events from executions are stored locally for troubleshooting * The cluster logs are stored locally for troubleshooting * The prefix for test parameters has changed * The system properties for Databricks tests are passed by Gradle automatically * The developer can reuse clusters Signed-off-by: Artur Owczarek --- .circleci/continue_config.yml | 2 +- integration/spark/app/build.gradle | 25 +- .../agent/DatabricksDynamicParameter.java | 70 +++++ .../agent/DatabricksIntegrationTest.java | 30 +- .../spark/agent/DatabricksUtils.java | 282 ++++++++++++------ .../spark/agent/DynamicParameter.java | 122 ++++---- .../spark/agent/EmrDynamicParameter.java | 112 +++++++ .../spark/agent/EmrIntegrationTest.java | 33 +- .../spark/agent/EmrTestEnvironment.java | 2 +- .../agent/databricks/ClusterLogConf.java | 15 - .../spark/agent/databricks/CreateCluster.java | 25 -- .../spark/agent/databricks/InitScript.java | 15 - .../databricks/WorkspaceDestination.java | 15 - 13 files changed, 489 insertions(+), 259 deletions(-) create mode 100644 integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksDynamicParameter.java create mode 100644 integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrDynamicParameter.java delete mode 100644 integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/ClusterLogConf.java delete mode 100644 integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/CreateCluster.java delete mode 100644 integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/InitScript.java delete mode 100644 integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/WorkspaceDestination.java diff --git a/.circleci/continue_config.yml 
b/.circleci/continue_config.yml index bffa003ecb..c91f47d629 100644 --- a/.circleci/continue_config.yml +++ b/.circleci/continue_config.yml @@ -876,7 +876,7 @@ jobs: sudo update-alternatives --set java ${JAVA_BIN} sudo update-alternatives --set javac ${JAVAC_BIN} - run: ./gradlew --console=plain shadowJar -x test -Pjava.compile.home=${JAVA17_HOME} - - run: ./gradlew --no-daemon --console=plain databricksIntegrationTest -x test -Pspark.version=<< parameters.spark-version >> -PdatabricksHost=$DATABRICKS_HOST -PdatabricksToken=$DATABRICKS_TOKEN -Pjava.compile.home=${JAVA17_HOME} + - run: ./gradlew --no-daemon --console=plain databricksIntegrationTest -x test -Pspark.version=<< parameters.spark-version >> -Popenlineage.tests.databricks.host=$DATABRICKS_HOST -Popenlineage.tests.databricks.token=$DATABRICKS_TOKEN -Pjava.compile.home=${JAVA17_HOME} - store_test_results: path: app/build/test-results/databricksIntegrationTest - store_artifacts: diff --git a/integration/spark/app/build.gradle b/integration/spark/app/build.gradle index 720bdc344e..6332c7d773 100644 --- a/integration/spark/app/build.gradle +++ b/integration/spark/app/build.gradle @@ -119,7 +119,7 @@ dependencies { testImplementation("org.mockito:mockito-core:${mockitoVersion}") testImplementation("org.mockito:mockito-inline:${mockitoVersion}") testImplementation("org.mockito:mockito-junit-jupiter:${mockitoVersion}") - testImplementation("com.databricks:databricks-sdk-java:0.4.0") { + testImplementation("com.databricks:databricks-sdk-java:0.32.2") { exclude group: 'com.fasterxml.jackson.core' exclude group: 'com.fasterxml.jackson.module' } @@ -406,6 +406,20 @@ tasks.register("databricksIntegrationTest", Test) { useJUnitPlatform { includeTags("databricks") } + options { + systemProperties = openLineageSystemProperties() + } +} + +/** + The "test" task runs JUnit tests in a separate JVM, and by default, system properties (-D parameters) + are not forwarded to it. Gradle allows forwarding them with -P parameter, but not everyone is aware of that. + Here, we selectively pass only properties that start with "openlineage" to avoid conflicts with + environment-related properties (e.g., current working directory) that could affect resource discovery. + The properties are later used {@link io.openlineage.spark.agent.DynamicParameter} + */ +private Map openLineageSystemProperties() { + System.getProperties().findAll { key, value -> key.toString().startsWith("openlineage") } } @@ -418,14 +432,7 @@ tasks.register("awsIntegrationTest", Test) { includeTags("aws") } options { - /* - The "test" task runs JUnit tests in a separate JVM, and by default, system properties (-D parameters) - are not forwarded to it. Gradle allows forwarding them with -P parameter, but not everyone is aware of that. - Here, we selectively pass only properties that start with "openlineage" to avoid conflicts with - environment-related properties (e.g., current working directory) that could affect resource discovery. - The properties are later used in template resolution. 
-        */
-        systemProperties = System.getProperties().findAll { key, value -> key.toString().startsWith("openlineage") }
+        systemProperties = openLineageSystemProperties()
     }
     testLogging {
         exceptionFormat "full"
diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksDynamicParameter.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksDynamicParameter.java
new file mode 100644
index 0000000000..14de73fa30
--- /dev/null
+++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksDynamicParameter.java
@@ -0,0 +1,70 @@
+/*
+/* Copyright 2018-2024 contributors to the OpenLineage project
+/* SPDX-License-Identifier: Apache-2.0
+*/
+
+package io.openlineage.spark.agent;
+
+import javax.annotation.Nullable;
+import lombok.Getter;
+import lombok.extern.slf4j.Slf4j;
+import org.slf4j.Logger;
+
+/**
+ * Class for dynamic provisioning of parameters. In its current form it retrieves the values from
+ * the system properties. They can be set using -Dopenlineage.tests.databricks.parameterName=value
+ * when running the application.
+ */
+@Slf4j
+@Getter
+public enum DatabricksDynamicParameter implements DynamicParameter {
+
+  // DEVELOPMENT PARAMETERS
+
+  /**
+   * The ID of the cluster to use. If specified, the tests will use this existing cluster instead
+   * of creating a new one.
+   */
+  ClusterId("clusterId", ""),
+
+  /**
+   * When set to {@code true}, prevents the Databricks cluster from terminating after tests
+   * complete. This allows for manual inspection and debugging of the cluster state.
+   */
+  PreventClusterTermination("preventClusterTermination", "false"),
+
+  // WORKSPACE PARAMETERS
+
+  Host("host"),
+
+  Token("token"),
+
+  // CLUSTER PARAMETERS
+
+  /** The Spark version as provided by Gradle. This entry does not use the openlineage prefix.
*/ + SparkVersion("spark.version", null, "3.5.2"); + + private final String parameterName; + private final String defaultValue; + private final String prefix; + + DatabricksDynamicParameter(String parameterName) { + this(parameterName, null); + } + + DatabricksDynamicParameter(String parameterName, @Nullable String defaultValue) { + this(parameterName, "openlineage.tests.databricks", defaultValue); + } + + DatabricksDynamicParameter( + String parameterName, @Nullable String prefix, @Nullable String defaultValue) { + this.parameterName = parameterName; + this.defaultValue = defaultValue; + this.prefix = prefix; + } + + @Override + public Logger getLog() { + return log; + } +} diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksIntegrationTest.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksIntegrationTest.java index 871ab44f04..266b370a12 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksIntegrationTest.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksIntegrationTest.java @@ -19,6 +19,9 @@ import io.openlineage.client.OpenLineage.RunEvent; import io.openlineage.client.OpenLineage.RunEvent.EventType; import io.openlineage.client.OpenLineage.RunFacet; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.List; import java.util.Map; import java.util.Optional; @@ -45,14 +48,16 @@ class DatabricksIntegrationTest { private static WorkspaceClient workspace; private static String clusterId; + private static final String executionTimestamp = + ZonedDateTime.now(ZoneOffset.UTC).format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss")); @BeforeAll @SneakyThrows public static void setup() { DatabricksConfig config = new DatabricksConfig() - .setHost(System.getProperty("databricksHost")) - .setToken(System.getProperty("databricksToken")); + .setHost(DatabricksDynamicParameter.Host.resolve()) + .setToken(DatabricksDynamicParameter.Token.resolve()); workspace = new WorkspaceClient(config); clusterId = init(workspace); @@ -66,14 +71,20 @@ public void beforeEach() { @AfterAll public static void shutdown() { if (clusterId != null) { - DatabricksUtils.shutdown(workspace, clusterId); + boolean existingClusterUsed = "".equals(DatabricksDynamicParameter.ClusterId.resolve()); + DatabricksUtils.shutdown( + workspace, + clusterId, + Boolean.parseBoolean(DatabricksDynamicParameter.PreventClusterTermination.resolve()), + existingClusterUsed, + executionTimestamp); } } @Test @SneakyThrows void testCreateTableAsSelect() { - List runEvents = runScript(workspace, clusterId, "ctas.py"); + List runEvents = runScript(workspace, clusterId, "ctas.py", executionTimestamp); RunEvent lastEvent = runEvents.get(runEvents.size() - 1); OutputDataset outputDataset = lastEvent.getOutputs().get(0); @@ -124,7 +135,8 @@ void testCreateTableAsSelect() { @Test @SneakyThrows void testNarrowTransformation() { - List runEvents = runScript(workspace, clusterId, "narrow_transformation.py"); + List runEvents = + runScript(workspace, clusterId, "narrow_transformation.py", executionTimestamp); assertThat(runEvents).isNotEmpty(); // assert start event exists @@ -156,7 +168,8 @@ void testNarrowTransformation() { @Test @SneakyThrows void testWideTransformation() { - List runEvents = runScript(workspace, clusterId, "wide_transformation.py"); + List runEvents = + runScript(workspace, clusterId, "wide_transformation.py", executionTimestamp); 
assertThat(runEvents).isNotEmpty(); // assert start event exists @@ -181,7 +194,8 @@ void testWideTransformation() { @Test void testWriteReadFromTableWithLocation() { - List runEvents = runScript(workspace, clusterId, "dataset_names.py"); + List runEvents = + runScript(workspace, clusterId, "dataset_names.py", executionTimestamp); // find complete event with output dataset containing name OutputDataset outputDataset = @@ -210,7 +224,7 @@ void testWriteReadFromTableWithLocation() { @Test @SneakyThrows void testMergeInto() { - List runEvents = runScript(workspace, clusterId, "merge_into.py"); + List runEvents = runScript(workspace, clusterId, "merge_into.py", executionTimestamp); RunEvent event = runEvents.stream() diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksUtils.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksUtils.java index b77022bb73..b5c13ca78a 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksUtils.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksUtils.java @@ -10,20 +10,27 @@ import com.databricks.sdk.WorkspaceClient; import com.databricks.sdk.service.compute.ClusterDetails; +import com.databricks.sdk.service.compute.ClusterLogConf; +import com.databricks.sdk.service.compute.CreateCluster; import com.databricks.sdk.service.compute.CreateClusterResponse; +import com.databricks.sdk.service.compute.DbfsStorageInfo; +import com.databricks.sdk.service.compute.InitScriptInfo; import com.databricks.sdk.service.compute.ListClustersRequest; +import com.databricks.sdk.service.compute.WorkspaceStorageInfo; import com.databricks.sdk.service.files.Delete; +import com.databricks.sdk.service.jobs.Run; import com.databricks.sdk.service.jobs.Source; import com.databricks.sdk.service.jobs.SparkPythonTask; import com.databricks.sdk.service.jobs.SubmitRun; +import com.databricks.sdk.service.jobs.SubmitRunResponse; import com.databricks.sdk.service.jobs.SubmitTask; +import com.databricks.sdk.service.workspace.Import; +import com.databricks.sdk.service.workspace.ImportFormat; +import com.databricks.sdk.support.Wait; +import com.google.common.collect.ImmutableList; import com.google.common.io.Resources; import io.openlineage.client.OpenLineage.RunEvent; import io.openlineage.client.OpenLineageClientUtils; -import io.openlineage.spark.agent.databricks.ClusterLogConf; -import io.openlineage.spark.agent.databricks.CreateCluster; -import io.openlineage.spark.agent.databricks.InitScript; -import io.openlineage.spark.agent.databricks.WorkspaceDestination; import java.io.FileInputStream; import java.io.FileWriter; import java.io.IOException; @@ -34,6 +41,7 @@ import java.nio.file.Paths; import java.time.Duration; import java.util.AbstractMap; +import java.util.Base64; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -43,7 +51,6 @@ import java.util.stream.StreamSupport; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; -import org.jetbrains.annotations.NotNull; @Slf4j public class DatabricksUtils { @@ -62,58 +69,84 @@ public class DatabricksUtils { public static final String NODE_TYPE = "Standard_DS3_v2"; public static final String INIT_SCRIPT_FILE = "/Shared/open-lineage-init-script.sh"; public static final String DBFS_CLUSTER_LOGS = "dbfs:/databricks/openlineage/cluster-logs"; - private static final String SPARK_VERSION = "spark.version"; public static final String DBFS_EVENTS_FILE = "dbfs:/databricks/openlineage/events_" + 
platformVersion() + ".log"; public static String platformVersion() { - return PLATFORM_VERSIONS.get(System.getProperty(SPARK_VERSION)).replace(".", "_"); + return PLATFORM_VERSIONS + .get(DatabricksDynamicParameter.SparkVersion.resolve()) + .replace(".", "_"); } @SneakyThrows static String init(WorkspaceClient workspace) { - uploadOpenlineageJar(workspace); - - // clear cluster logs - Delete deleteClusterLogs = new Delete(); - deleteClusterLogs.setPath(DBFS_CLUSTER_LOGS); - deleteClusterLogs.setRecursive(true); - workspace.dbfs().delete(deleteClusterLogs); - - // check if cluster is available - Iterable clusterDetails = workspace.clusters().list(new ListClustersRequest()); - if (clusterDetails != null) { - log.info("Encountered clusters to delete."); - StreamSupport.stream(clusterDetails.spliterator(), false) - .filter(cl -> cl.getClusterName().equals(getClusterName())) - .forEach( - cl -> { - log.info("Deleting a cluster {}-{}.", cl.getClusterName(), cl.getClusterId()); - workspace.clusters().permanentDelete(cl.getClusterId()); - }); + String resolvedClusterId = DatabricksDynamicParameter.ClusterId.resolve(); + boolean attachingToExistingCluster = !"".equals(resolvedClusterId); + + uploadOpenLineageJar(workspace); + uploadInitializationScript(workspace); + + if (attachingToExistingCluster) { + log.info("Attaching to the existing cluster [{}]", resolvedClusterId); + /* + OpenLineage jars are copied from DBFS to the cluster during the initialization script execution. + To ensure the updated jar is used, the cluster must be restarted after updating the jar. + Without a restart, the initialization script won't run again, and the old jar will still be used. + */ + log.warn( + "⚠️ The cluster must be restarted to apply changes if the OpenLineage jar has been updated. ⚠️"); + return resolvedClusterId; + } else { + // We may reuse the cluster name where there are existing old logs. This can happen if the + // tests failed. Here we make sure the logs are clean. 
+ Delete deleteClusterLogs = new Delete(); + deleteClusterLogs.setPath(DBFS_CLUSTER_LOGS); + deleteClusterLogs.setRecursive(true); + workspace.dbfs().delete(deleteClusterLogs); + + log.info("Creating a new Databricks cluster."); + String sparkPlatformVersion = getSparkPlatformVersion(); + String clusterName = CLUSTER_NAME + "_" + getSparkPlatformVersion(); + log.debug("Ensuring the cluster with name [{}] doesn't exist.", clusterName); + for (ClusterDetails clusterDetail : workspace.clusters().list(new ListClustersRequest())) { + if (clusterDetail.getClusterName().equals(clusterName)) { + log.info( + "Deleting a cluster [{}] with ID [{}].", + clusterDetail.getClusterName(), + clusterDetail.getClusterId()); + workspace.clusters().permanentDelete(clusterDetail.getClusterId()); + } + } + Wait cluster = + createCluster(workspace, clusterName, sparkPlatformVersion); + + String clusterId = cluster.getResponse().getClusterId(); + log.info("Ensuring the new cluster [{}] with ID [{}] is running...", clusterName, clusterId); + cluster.get(Duration.ofMinutes(10)); + return clusterId; } - String clusterId = createCluster(workspace); - - log.info("Ensuring cluster is running"); - workspace.clusters().ensureClusterIsRunning(clusterId); - - return clusterId; } @SneakyThrows - static void shutdown(WorkspaceClient workspace, String clusterId) { + static void shutdown( + WorkspaceClient workspace, + String clusterId, + boolean preventClusterTermination, + boolean existingClusterUsed, + String executionTimestamp) { // remove events file workspace.dbfs().delete(DBFS_EVENTS_FILE); - // need to terminate cluster to have access to cluster logs - workspace.clusters().delete(clusterId); - workspace.clusters().waitGetClusterTerminated(clusterId); + if (!(preventClusterTermination || existingClusterUsed)) { + // need to terminate cluster to have access to cluster logs + workspace.clusters().delete(clusterId); + workspace.clusters().waitGetClusterTerminated(clusterId); + } - // wait for logs to be available Path clusterLogs = Paths.get(DBFS_CLUSTER_LOGS + "/" + clusterId + "/driver/log4j-active.log"); - + log.info("Waiting for the cluster logs to be available on DBFS under [{}]...", clusterLogs); await() - .atMost(Duration.ofSeconds(30)) + .atMost(Duration.ofSeconds(300)) .pollInterval(Duration.ofSeconds(3)) .until( () -> { @@ -125,24 +158,21 @@ static void shutdown(WorkspaceClient workspace, String clusterId) { }); // fetch logs and move to local file - FileWriter fileWriter = new FileWriter("./build/cluster-log4j.log"); - workspace.dbfs().readAllLines(clusterLogs, StandardCharsets.UTF_8).stream() - .forEach( - line -> { - try { - fileWriter.write(line + System.lineSeparator()); - } catch (IOException e) { - throw new RuntimeException(e); - } - }); + String logsLocation = "./build/" + executionTimestamp + "-cluster-log4j.log"; + log.info("Fetching cluster logs to [{}]", logsLocation); + writeLinesToFile( + logsLocation, workspace.dbfs().readAllLines(clusterLogs, StandardCharsets.UTF_8)); + log.info("Logs fetched."); workspace.dbfs().delete(clusterLogs.toAbsolutePath().toString()); } @SneakyThrows - static List runScript(WorkspaceClient workspace, String clusterId, String scriptName) { + static List runScript( + WorkspaceClient workspace, String clusterId, String scriptName, String executionTimestamp) { // upload scripts String dbfsScriptPath = "dbfs:/databricks/openlineage/scripts/" + scriptName; + log.info("Uploading script [{}] to [{}]", scriptName, dbfsScriptPath); String taskName = scriptName.replace(".py", ""); 
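+    // The script is read from the test resources (databricks_notebooks/) and written to DBFS,
+    // so the one-time job submitted below can execute it on the cluster.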
workspace @@ -151,6 +181,7 @@ static List runScript(WorkspaceClient workspace, String clusterId, Str Paths.get(dbfsScriptPath), readAllBytes( Paths.get(Resources.getResource("databricks_notebooks/" + scriptName).getPath()))); + log.info("The script [{}] has been uploaded to [{}].", scriptName, dbfsScriptPath); SparkPythonTask task = new SparkPythonTask(); task.setSource(Source.WORKSPACE); @@ -166,59 +197,63 @@ static List runScript(WorkspaceClient workspace, String clusterId, Str submitRun.setTasks(Collections.singletonList(runSubmitTaskSettings)); // trigger one time job - workspace.jobs().submit(submitRun).get(); + log.info("Submitting PySpark task [{}]...", taskName); + Wait submit = workspace.jobs().submit(submitRun); + log.info("PySpark task [{}] submitted. Waiting for completion...", taskName); + submit.get(); + log.info("PySpark task [{}] completed.", taskName); - return fetchEventsEmitted(workspace); + return fetchEventsEmitted(workspace, scriptName, executionTimestamp); } @SneakyThrows - private static String createCluster(WorkspaceClient workspace) { - HashMap sparkConf = new HashMap(); + private static Wait createCluster( + WorkspaceClient workspace, String clusterName, String sparkPlatformVersion) { + HashMap sparkConf = new HashMap<>(); sparkConf.put("spark.openlineage.facets.debug.disabled", "false"); sparkConf.put("spark.openlineage.transport.type", "file"); + // Each test case script should copy this file to dbfs:/databricks/openlineage/ at the end of + // execution. sparkConf.put("spark.openlineage.transport.location", "/tmp/events.log"); sparkConf.put("spark.extraListeners", "io.openlineage.spark.agent.OpenLineageSparkListener"); - sparkConf.put("spark.openlineage.version", "v1"); CreateCluster createCluster = - CreateCluster.builder() - .cluster_name(getClusterName()) - .spark_version(getSparkPlatformVersion()) - .node_type_id(NODE_TYPE) - .autotermination_minutes(10L) - .num_workers(1L) - .init_scripts( - new InitScript[] {new InitScript(new WorkspaceDestination(INIT_SCRIPT_FILE))}) - .spark_conf(sparkConf) - .cluster_log_conf(new ClusterLogConf(new WorkspaceDestination(DBFS_CLUSTER_LOGS))) - .build(); - - log.info("Creating cluster"); + new CreateCluster() + .setClusterName(clusterName) + .setSparkVersion(sparkPlatformVersion) + .setNodeTypeId(NODE_TYPE) + .setAutoterminationMinutes(10L) + .setNumWorkers(1L) + .setInitScripts( + ImmutableList.of( + new InitScriptInfo() + .setWorkspace(new WorkspaceStorageInfo().setDestination(INIT_SCRIPT_FILE)))) + .setSparkConf(sparkConf) + .setClusterLogConf( + new ClusterLogConf() + .setDbfs(new DbfsStorageInfo().setDestination(DBFS_CLUSTER_LOGS))); + + log.info("Creating cluster [{}]...", createCluster); workspace.config().setHttpTimeoutSeconds(600); // 10 minutes, otherwise it's rather setup issue - CreateClusterResponse response = - workspace - .apiClient() - .POST("/api/2.0/clusters/create", createCluster, CreateClusterResponse.class); - - return response.getClusterId(); - } - - @NotNull - private static String getClusterName() { - return CLUSTER_NAME + "_" + getSparkPlatformVersion(); + return workspace.clusters().create(createCluster); } private static String getSparkPlatformVersion() { - if (!PLATFORM_VERSIONS_NAMES.containsKey(System.getProperty(SPARK_VERSION))) { - log.error("Unsupported spark_version for databricks test {}", SPARK_VERSION); + String sparkVersion = DatabricksDynamicParameter.SparkVersion.resolve(); + if (!PLATFORM_VERSIONS_NAMES.containsKey(sparkVersion)) { + log.error("Unsupported [spark.version] for 
databricks test: [{}].", sparkVersion); } - log.info( - "Databricks version {}", PLATFORM_VERSIONS_NAMES.get(System.getProperty(SPARK_VERSION))); - return PLATFORM_VERSIONS_NAMES.get(System.getProperty(SPARK_VERSION)); + log.info("Databricks version: [{}].", PLATFORM_VERSIONS_NAMES.get(sparkVersion)); + return PLATFORM_VERSIONS_NAMES.get(sparkVersion); } + /** + * Copies the jar to the DBFS location from where it is copied to the driver host by the + * initialization script. The copying happens only on the cluster initialization, so you have to + * restart the cluster if you change the jar and want to use it. + */ @SneakyThrows - private static void uploadOpenlineageJar(WorkspaceClient workspace) { + private static void uploadOpenLineageJar(WorkspaceClient workspace) { Path jarFile = Files.list(Paths.get("../build/libs/")) .filter(p -> p.getFileName().toString().startsWith("openlineage-spark_")) @@ -245,11 +280,39 @@ private static void uploadOpenlineageJar(WorkspaceClient workspace) { .forEach(f -> workspace.dbfs().delete(f.getPath())); } - // upload to DBFS -> 12MB file upload need to go in chunks smaller than 1MB each + String destination = "dbfs:/databricks/openlineage/" + jarFile.getFileName(); + uploadFileToDbfs(workspace, jarFile, destination); + log.info("OpenLineage jar has been uploaded to [{}]", destination); + } + + /** + * Uploads the cluster initialization script to DBFS. + * + *

The script is used by the clusters to copy OpenLineage jar to the location where it can be + * loaded by the driver. + */ + private static void uploadInitializationScript(WorkspaceClient workspace) throws IOException { + String string = + Resources.toString( + Paths.get("../databricks/open-lineage-init-script.sh").toUri().toURL(), + StandardCharsets.UTF_8); + String encodedString = Base64.getEncoder().encodeToString(string.getBytes()); + workspace + .workspace() + .importContent( + new Import() + .setPath(INIT_SCRIPT_FILE) + .setContent(encodedString) + .setFormat(ImportFormat.AUTO) + .setOverwrite(true)); + } + + @SneakyThrows + private static void uploadFileToDbfs(WorkspaceClient workspace, Path jarFile, String toLocation) { FileInputStream fis = new FileInputStream(jarFile.toString()); - OutputStream outputStream = - workspace.dbfs().getOutputStream("dbfs:/databricks/openlineage/" + jarFile.getFileName()); + OutputStream outputStream = workspace.dbfs().getOutputStream(toLocation); + // upload to DBFS -> 12MB file upload need to go in chunks smaller than 1MB each byte[] buf = new byte[500000]; // approx 0.5MB int len = fis.read(buf); while (len != -1) { @@ -261,12 +324,43 @@ private static void uploadOpenlineageJar(WorkspaceClient workspace) { } @SneakyThrows - private static List fetchEventsEmitted(WorkspaceClient workspace) { - return workspace - .dbfs() - .readAllLines(Paths.get(DBFS_EVENTS_FILE), StandardCharsets.UTF_8) - .stream() - .map(event -> OpenLineageClientUtils.runEventFromJson(event)) + private static List fetchEventsEmitted( + WorkspaceClient workspace, String scriptName, String executionTimestamp) { + Path path = Paths.get(DBFS_EVENTS_FILE); + log.info("Fetching events from [{}]...", path); + + List eventsLines = workspace.dbfs().readAllLines(path, StandardCharsets.UTF_8); + log.info("There are [{}] events.", eventsLines.size()); + + saveEventsLocally(scriptName, executionTimestamp, eventsLines); + + return eventsLines.stream() + .map(OpenLineageClientUtils::runEventFromJson) .collect(Collectors.toList()); } + + /** Downloads the events locally for troubleshooting purposes */ + private static void saveEventsLocally( + String scriptName, String executionTimestamp, List lines) throws IOException { + // The source file path is reused and deleted before every test. As long as the tests are not + // executed concurrently, it should contain the events from the current test. 
+ String eventsLocation = "./build/" + executionTimestamp + "-" + scriptName + "-events.ndjson"; + log.info("Fetching events to [{}]", eventsLocation); + writeLinesToFile(eventsLocation, lines); + log.info("Events fetched."); + } + + private static void writeLinesToFile(String eventsLocation, List lines) + throws IOException { + try (FileWriter fileWriter = new FileWriter(eventsLocation)) { + lines.forEach( + line -> { + try { + fileWriter.write(line + System.lineSeparator()); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } + } } diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DynamicParameter.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DynamicParameter.java index ae49a3090c..aae54ce3e0 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DynamicParameter.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DynamicParameter.java @@ -5,83 +5,85 @@ package io.openlineage.spark.agent; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; +import javax.annotation.Nullable; +import org.slf4j.Logger; /** - * Class for dynamic provisioning of parameters. In the current form it retrieves the values from - * the system properties. They can be set using -Dopenlineage.tests.parameterName=value when running - * the application. + * Represents a dynamically provisioned parameter whose value can be retrieved from system + * properties. Implementations of this interface define parameter names, prefixes, default values, + * and logging capabilities. + * + *

Parameters can be specified when running the application using the {@code -D} JVM argument
+ * syntax: {@code -D<prefix>.<parameterName>=value}.
+ *
+ *

For example, if a parameter has a prefix of {@code openlineage.test} and a parameter name of + * {@code clusterId}, it can be set using {@code -Dopenlineage.test.clusterId=value}. + * + *
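+ * <p>A minimal usage sketch (the values are illustrative; {@code EmrDynamicParameter.BucketName}
+ * is one concrete parameter that uses the {@code openlineage.test} prefix):
+ *
+ * <pre>{@code
+ * // JVM started with: -Dopenlineage.test.bucketName=my-it-bucket
+ * String bucket = EmrDynamicParameter.BucketName.resolve(); // "my-it-bucket"
+ * // Without the property, resolve() falls back to the declared default value,
+ * // or throws a RuntimeException when no default is declared.
+ * }</pre>
+ *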

The {@link #resolve()} method retrieves the parameter's value from the system properties, + * returning the default value if the property is not set and a default is provided. */ -@Slf4j -@Getter -public enum DynamicParameter { - // DEVELOPMENT +public interface DynamicParameter { /** - * The ID of the EMR cluster if we want to use the existing one instead of creating a new one in - * the tests + * Returns the prefix applied to the parameter name when constructing the full system property + * key. The full key is formed by concatenating the prefix, a dot ('.'), and the parameter name. + * If the prefix is not necessary, this method may return {@code null}. + * + * @return the prefix string or {@code null} if no prefix is used */ - ClusterId("clusterId", ""), - PreventS3Cleanup("preventS3Cleanup", "false"), - PreventClusterTermination("preventClusterTermination", "false"), + @Nullable + String getPrefix(); + /** - * Determines which port can be used to debug the application. For debugging to work, make sure - * the EC2 subnet has the firewall rule, allowing you to access the master node using this port. - * You have to edit the EC2 security group the cluster is attached to and add the TCP inbound - * rule. Then you can use remote debugging option in your IDE (with this port and the master - * node's IP address) to attach session. If attaching seems to keep forever, it means that the - * firewall rule is not correct. If the server rejects the debugger's connection it means the - * application is not running yet, and you should repeat the attempt or make sure it is still - * running. You should run the cluster beforehand, note the master IP address and have the - * debugging session prepared before you attach the + * Returns the name of the parameter used when constructing the full system property key. This + * name is appended to the prefix (if any) to form the complete key. + * + * @return the parameter name */ - DebugPort("debugPort", "5005"), + String getParameterName(); - // CLUSTER - EmrLabel("emrLabel", "emr-7.2.0"), - EventsKeyPrefix("eventsKeyPrefix", "events"), - Ec2InstanceProfile("ec2InstanceProfile", "EMR_EC2_DefaultRole"), - ServiceRole("serviceRole", "EMR_DefaultRole"), - MasterInstanceType("masterInstanceType", "m4.large"), - SlaveInstanceType("slaveInstanceType", "m4.large"), - Ec2SubnetId("ec2SubnetId"), - /** The optional key pair which can be used to SSH to the cluster. Useful for troubleshooting. */ - SshKeyPairName("sshKeyPairName", ""), - IdleClusterTerminationSeconds("clusterIdleTerminationSeconds", "300"), + String name(); - /** The bucket where the tests keep the dependency jars, scripts, produced events, logs, etc */ - BucketName("bucketName"), /** - * The prefix where the tests will be run. Each test execution will have a separate random - * directory inside. + * Returns the {@link Logger} instance used for logging messages. + * + * @return the Logger instance */ - TestsKeyPrefix("testsKeyPrefix", "emr-integration-tests/test-"); - - private final String templateParameter; - private final String defaultValue; + Logger getLog(); - DynamicParameter(String templateParameter) { - this(templateParameter, null); - } - - DynamicParameter(String templateParameter, String defaultValue) { - this.templateParameter = templateParameter; - this.defaultValue = defaultValue; - } + /** + * Returns the default value of the parameter if it is not specified in the system properties. May + * return {@code null} if there is no default value. 
+ * + * @return the default value or {@code null} if none is specified + */ + @Nullable + String getDefaultValue(); - String resolve() { - String key = "openlineage.tests." + getTemplateParameter(); - log.debug("Resolving parameter [{}] using key [{}]", name(), key); + /** + * Resolves the value of the parameter by retrieving it from the system properties using the + * constructed key. If the parameter is not found in the system properties and a default value is + * provided, the default value is returned. If the parameter is not found and no default value is + * provided, a {@link RuntimeException} is thrown. + * + * @return the resolved parameter value + * @throws RuntimeException if the parameter value is not found and no default value is provided + */ + default String resolve() { + // We can skip prefix in special cases where it is not used. + String prefix = getPrefix() != null ? getPrefix() + "." : ""; + String key = prefix + getParameterName(); + getLog().debug("Resolving parameter [{}] using key [{}]", name(), key); String resolved = System.getProperty(key); if (resolved != null) { return resolved; } else { - if (defaultValue != null) { - log.debug( - "The value for parameter [{}] has not been found. Using the default value [{}]", - key, - defaultValue); - return defaultValue; + if (getDefaultValue() != null) { + getLog() + .debug( + "The value for parameter [{}] has not been found. Using the default value [{}]", + key, + getDefaultValue()); + return getDefaultValue(); } } throw new RuntimeException( diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrDynamicParameter.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrDynamicParameter.java new file mode 100644 index 0000000000..d500df7d91 --- /dev/null +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrDynamicParameter.java @@ -0,0 +1,112 @@ +/* +/* Copyright 2018-2024 contributors to the OpenLineage project +/* SPDX-License-Identifier: Apache-2.0 +*/ + +package io.openlineage.spark.agent; + +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; +import org.slf4j.Logger; + +/** + * Enumeration of dynamic parameters used in the EMR integration tests. Each parameter corresponds + * to a system property that can be set to customize test behavior. + * + *

Parameters can be specified when running the application using the {@code -D} JVM argument
+ * syntax: {@code -Dopenlineage.test.<parameterName>=value}.
+ *
+ *

For example, to set the EMR cluster ID, you can use: {@code + * -Dopenlineage.test.clusterId=your-cluster-id} + * + *
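+ * <p>For instance, a debugging run might reuse an existing cluster and keep its resources around
+ * afterwards (the values below are illustrative):
+ *
+ * <pre>{@code
+ * -Dopenlineage.test.clusterId=j-1ABCDEFGHIJKL
+ * -Dopenlineage.test.preventClusterTermination=true
+ * -Dopenlineage.test.preventS3Cleanup=true
+ * }</pre>
+ *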

This enum includes parameters for development settings and cluster configuration, such as + * preventing S3 cleanup, specifying instance types, and setting the bucket name for test artifacts. + */ +@Slf4j +@Getter +public enum EmrDynamicParameter implements DynamicParameter { + + // DEVELOPMENT PARAMETERS + + /** + * The ID of the EMR cluster to use. If specified, the tests will use this existing cluster + * instead of creating a new one. + */ + ClusterId("clusterId", ""), + + /** + * When set to {@code true}, prevents the cleanup of S3 resources after tests complete. Useful for + * debugging purposes. + */ + PreventS3Cleanup("preventS3Cleanup", "false"), + + /** + * When set to {@code true}, prevents the EMR cluster from terminating after tests complete. This + * allows for manual inspection and debugging of the cluster state. + */ + PreventClusterTermination("preventClusterTermination", "false"), + + /** + * Determines which port can be used to debug the application. For debugging to work, make sure + * the EC2 subnet has the firewall rule, allowing you to access the master node using this port. + * You have to edit the EC2 security group the cluster is attached to and add the TCP inbound + * rule. Then you can use remote debugging option in your IDE (with this port and the master + * node's IP address) to attach session. If attaching seems to keep forever, it means that the + * firewall rule is not correct. If the server rejects the debugger's connection it means the + * application is not running yet, and you should repeat the attempt or make sure it is still + * running. You should run the cluster beforehand, note the master IP address and have the + * debugging session prepared before you attach the + */ + DebugPort("debugPort", "5005"), + + // CLUSTER + EmrLabel("emrLabel", "emr-7.2.0"), + + /** The S3 key prefix where event logs will be stored. */ + EventsKeyPrefix("eventsKeyPrefix", "events"), + + Ec2InstanceProfile("ec2InstanceProfile", "EMR_EC2_DefaultRole"), + + ServiceRole("serviceRole", "EMR_DefaultRole"), + + MasterInstanceType("masterInstanceType", "m4.large"), + + SlaveInstanceType("slaveInstanceType", "m4.large"), + + Ec2SubnetId("ec2SubnetId"), + + /** The optional key pair which can be used to SSH to the cluster. Useful for troubleshooting. */ + SshKeyPairName("sshKeyPairName", ""), + + IdleClusterTerminationSeconds("clusterIdleTerminationSeconds", "300"), + + /** + * The name of the S3 bucket where the tests store dependency JARs, scripts, produced events, + * logs, etc. + */ + BucketName("bucketName"), + /** + * The S3 key prefix under which the tests will be executed. Each test execution will have a + * separate random directory inside this prefix. 
+ */ + TestsKeyPrefix("testsKeyPrefix", "emr-integration-tests/test-"); + + private final String parameterName; + private final String defaultValue; + private final String prefix; + + EmrDynamicParameter(String parameterName) { + this(parameterName, null); + } + + EmrDynamicParameter(String parameterName, String defaultValue) { + this.parameterName = parameterName; + this.defaultValue = defaultValue; + this.prefix = "openlineage.test"; + } + + @Override + public Logger getLog() { + return log; + } +} diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrIntegrationTest.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrIntegrationTest.java index ea643a6dde..cdfb04269c 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrIntegrationTest.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrIntegrationTest.java @@ -33,7 +33,7 @@ * methods like unit tests or Spark integration tests in the container. * *

To execute the tests, configure the required parameters using system properties (refer to - * {@link DynamicParameter} for more details). + * {@link EmrDynamicParameter} for more details). * *

PySpark Test Samples

* @@ -64,7 +64,7 @@ * *

All infrastructure details and configuration parameters should be set using system properties. * For example: {@code -Dopenlineage.tests.bucketName=my-bucket-name}. Most parameters have - * defaults. For a full list of configurable parameters, see {@link DynamicParameter}. + * defaults. For a full list of configurable parameters, see {@link EmrDynamicParameter}. * *

Note on JUnit and Gradle

* @@ -82,23 +82,23 @@ class EmrIntegrationTest { static { // Tests prefix with the date mark to tell when they were run in UTC String testsPrefix = - DynamicParameter.TestsKeyPrefix.resolve() + EmrDynamicParameter.TestsKeyPrefix.resolve() + ZonedDateTime.now(ZoneOffset.UTC) .format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss")) + "/"; - String clusterId = DynamicParameter.ClusterId.resolve(); + String clusterId = EmrDynamicParameter.ClusterId.resolve(); EmrTestEnvironment.EmrTestEnvironmentProperties.NewCluster newCluster = "".equals(clusterId) ? EmrTestEnvironment.EmrTestEnvironmentProperties.NewCluster.builder() - .emrLabel(DynamicParameter.EmrLabel.resolve()) - .ec2InstanceProfile(DynamicParameter.Ec2InstanceProfile.resolve()) - .serviceRole(DynamicParameter.ServiceRole.resolve()) - .masterInstanceType(DynamicParameter.MasterInstanceType.resolve()) - .slaveInstanceType(DynamicParameter.SlaveInstanceType.resolve()) - .subnetId(DynamicParameter.Ec2SubnetId.resolve()) - .ec2SshKeyName(DynamicParameter.SshKeyPairName.resolve()) + .emrLabel(EmrDynamicParameter.EmrLabel.resolve()) + .ec2InstanceProfile(EmrDynamicParameter.Ec2InstanceProfile.resolve()) + .serviceRole(EmrDynamicParameter.ServiceRole.resolve()) + .masterInstanceType(EmrDynamicParameter.MasterInstanceType.resolve()) + .slaveInstanceType(EmrDynamicParameter.SlaveInstanceType.resolve()) + .subnetId(EmrDynamicParameter.Ec2SubnetId.resolve()) + .ec2SshKeyName(EmrDynamicParameter.SshKeyPairName.resolve()) .idleClusterTerminationSeconds( - Long.parseLong(DynamicParameter.IdleClusterTerminationSeconds.resolve())) + Long.parseLong(EmrDynamicParameter.IdleClusterTerminationSeconds.resolve())) .build() : null; emrTestParameters = @@ -108,13 +108,14 @@ class EmrIntegrationTest { // We can connect to the existing EMR cluster to speed up testing .clusterId(clusterId) .preventS3Cleanup( - Boolean.parseBoolean(DynamicParameter.PreventS3Cleanup.resolve())) + Boolean.parseBoolean(EmrDynamicParameter.PreventS3Cleanup.resolve())) .preventClusterTermination( - Boolean.parseBoolean(DynamicParameter.PreventClusterTermination.resolve())) - .debugPort(Integer.parseInt(DynamicParameter.DebugPort.resolve())) + Boolean.parseBoolean( + EmrDynamicParameter.PreventClusterTermination.resolve())) + .debugPort(Integer.parseInt(EmrDynamicParameter.DebugPort.resolve())) .build()) .cluster(newCluster) - .bucketName(DynamicParameter.BucketName.resolve()) + .bucketName(EmrDynamicParameter.BucketName.resolve()) .keyPrefix(testsPrefix) .build(); } diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrTestEnvironment.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrTestEnvironment.java index 356e74a9e4..21dd8a3c85 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrTestEnvironment.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/EmrTestEnvironment.java @@ -260,7 +260,7 @@ public void s3Cleanup() { if (properties.getDevelopment().isPreventS3Cleanup()) { log.info( "The [{}] flag has been enabled. Skipping S3 cleanup. 
Remember to remove it manually.", - DynamicParameter.PreventS3Cleanup.getTemplateParameter()); + EmrDynamicParameter.PreventS3Cleanup.getParameterName()); } else { log.info("Deleting the files under [{}]", properties.getKeyPrefix()); AwsUtils.deleteFiles(s3Client, properties.getBucketName(), properties.getKeyPrefix()); diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/ClusterLogConf.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/ClusterLogConf.java deleted file mode 100644 index 881b43b69e..0000000000 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/ClusterLogConf.java +++ /dev/null @@ -1,15 +0,0 @@ -/* -/* Copyright 2018-2024 contributors to the OpenLineage project -/* SPDX-License-Identifier: Apache-2.0 -*/ - -package io.openlineage.spark.agent.databricks; - -import lombok.AllArgsConstructor; -import lombok.Getter; - -@Getter -@AllArgsConstructor -public class ClusterLogConf { - WorkspaceDestination dbfs; -} diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/CreateCluster.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/CreateCluster.java deleted file mode 100644 index 8aa80f75a8..0000000000 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/CreateCluster.java +++ /dev/null @@ -1,25 +0,0 @@ -/* -/* Copyright 2018-2024 contributors to the OpenLineage project -/* SPDX-License-Identifier: Apache-2.0 -*/ - -package io.openlineage.spark.agent.databricks; - -import java.util.Map; -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Getter; - -@Getter -@AllArgsConstructor -@Builder -public class CreateCluster { - String cluster_name; - String spark_version; - String node_type_id; - Long autotermination_minutes; - Long num_workers; - InitScript[] init_scripts; - Map spark_conf; - ClusterLogConf cluster_log_conf; -} diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/InitScript.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/InitScript.java deleted file mode 100644 index 463f324487..0000000000 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/InitScript.java +++ /dev/null @@ -1,15 +0,0 @@ -/* -/* Copyright 2018-2024 contributors to the OpenLineage project -/* SPDX-License-Identifier: Apache-2.0 -*/ - -package io.openlineage.spark.agent.databricks; - -import lombok.AllArgsConstructor; -import lombok.Getter; - -@Getter -@AllArgsConstructor -public class InitScript { - WorkspaceDestination workspace; -} diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/WorkspaceDestination.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/WorkspaceDestination.java deleted file mode 100644 index fab220d00d..0000000000 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/databricks/WorkspaceDestination.java +++ /dev/null @@ -1,15 +0,0 @@ -/* -/* Copyright 2018-2024 contributors to the OpenLineage project -/* SPDX-License-Identifier: Apache-2.0 -*/ - -package io.openlineage.spark.agent.databricks; - -import lombok.AllArgsConstructor; -import lombok.Getter; - -@Getter -@AllArgsConstructor -public class WorkspaceDestination { - String destination; -} From 75230b48383f48235eb9c466db1be2c12bd63e0b Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Wed, 23 Oct 2024 15:07:44 +0200 Subject: [PATCH 23/89] Add missing javadocs for 
spark-extension-interfaces (causing warnings) (#3165) Signed-off-by: Artur Owczarek --- .../extension/v1/DatasetWithDelegate.java | 13 +++++- .../extension/v1/DatasetWithIdentifier.java | 14 ++++++- .../v1/InputDatasetWithDelegate.java | 30 +++++++++++++- .../extension/v1/InputDatasetWithFacets.java | 22 ++++++++++ .../v1/InputDatasetWithIdentifier.java | 36 +++++++++++++++- .../shade/extension/v1/LineageRelation.java | 21 ++++++++-- .../extension/v1/LineageRelationProvider.java | 29 +++++++++---- .../spark/shade/extension/v1/OlExprId.java | 17 +++++++- .../v1/OutputDatasetWithDelegate.java | 35 +++++++++++++++- .../extension/v1/OutputDatasetWithFacets.java | 22 ++++++++++ .../v1/OutputDatasetWithIdentifier.java | 36 +++++++++++++++- .../SparkOpenLineageExtensionVisitor.java | 41 ++++++++++++++++++- 12 files changed, 297 insertions(+), 19 deletions(-) diff --git a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/DatasetWithDelegate.java b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/DatasetWithDelegate.java index 5bf1864436..3faf5714bc 100644 --- a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/DatasetWithDelegate.java +++ b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/DatasetWithDelegate.java @@ -4,7 +4,18 @@ */ package io.openlineage.spark.shade.extension.v1; -/** Dataset with a node in LogicalPlan where a input dataset shall be extracted from */ +/** + * Represents a dataset associated with a node in a Spark LogicalPlan. + * + *

Implementing classes should provide a method to retrieve the node from which an input or + * output dataset can be extracted. This is used to capture lineage information by identifying the + * node in the Spark LogicalPlan that corresponds to the dataset. + */ interface DatasetWithDelegate { + /** + * Returns the node in the LogicalPlan from which the dataset is extracted. + * + * @return the node object representing the location in the LogicalPlan + */ Object getNode(); } diff --git a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/DatasetWithIdentifier.java b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/DatasetWithIdentifier.java index a288bb6736..4f3fa74f4f 100644 --- a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/DatasetWithIdentifier.java +++ b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/DatasetWithIdentifier.java @@ -6,7 +6,19 @@ import io.openlineage.client.utils.DatasetIdentifier; -/** Dataset with an identifier containing namespace and name */ +/** + * Represents a dataset with an identifier that includes the dataset's namespace and name. + * + *

Implementing classes should provide a method to retrieve the {@link DatasetIdentifier}, which + * encapsulates the dataset's unique namespace and name. This identifier follows the naming + * conventions outlined in the OpenLineage Naming + * Specification, ensuring consistency across datasets for lineage tracking and data cataloging. + */ interface DatasetWithIdentifier { + /** + * Returns the {@link DatasetIdentifier}, which contains the namespace and name of the dataset. + * + * @return the dataset identifier containing the namespace and name + */ DatasetIdentifier getDatasetIdentifier(); } diff --git a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/InputDatasetWithDelegate.java b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/InputDatasetWithDelegate.java index 0b94304e1b..94559656f6 100644 --- a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/InputDatasetWithDelegate.java +++ b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/InputDatasetWithDelegate.java @@ -8,13 +8,26 @@ import io.openlineage.client.OpenLineage.InputDatasetInputFacetsBuilder; import java.util.Objects; -/** Input dataset with a node in LogicalPlan where a input dataset shall be extracted from */ +/** + * Represents an input dataset associated with a node in a LogicalPlan. This class allows the + * extraction of facets (metadata) from the input dataset. + * + *

It implements both {@link InputDatasetWithFacets} and {@link DatasetWithDelegate}, providing + * methods to retrieve dataset facets and the node from which the dataset is extracted. + */ public class InputDatasetWithDelegate implements InputDatasetWithFacets, DatasetWithDelegate { private final Object node; private final DatasetFacetsBuilder datasetFacetsBuilder; private final InputDatasetInputFacetsBuilder inputFacetsBuilder; + /** + * Constructs a new {@code InputDatasetWithDelegate}. + * + * @param node the node in the LogicalPlan from which the input dataset is extracted + * @param datasetFacetsBuilder a builder for the dataset facets + * @param inputFacetsBuilder a builder for the input dataset input facets + */ public InputDatasetWithDelegate( Object node, DatasetFacetsBuilder datasetFacetsBuilder, @@ -24,16 +37,31 @@ public InputDatasetWithDelegate( this.inputFacetsBuilder = inputFacetsBuilder; } + /** + * Returns the {@link DatasetFacetsBuilder} for building dataset facets. + * + * @return the dataset facets builder + */ @Override public DatasetFacetsBuilder getDatasetFacetsBuilder() { return datasetFacetsBuilder; } + /** + * Returns the {@link InputDatasetInputFacetsBuilder} for building input dataset facets. + * + * @return the input dataset input facets builder + */ @Override public InputDatasetInputFacetsBuilder getInputFacetsBuilder() { return inputFacetsBuilder; } + /** + * Returns the node in the LogicalPlan from which the input dataset is extracted. + * + * @return the LogicalPlan node + */ @Override public Object getNode() { return node; diff --git a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/InputDatasetWithFacets.java b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/InputDatasetWithFacets.java index 9af8d238da..20e8743738 100644 --- a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/InputDatasetWithFacets.java +++ b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/InputDatasetWithFacets.java @@ -7,8 +7,30 @@ import io.openlineage.client.OpenLineage.DatasetFacetsBuilder; import io.openlineage.client.OpenLineage.InputDatasetInputFacetsBuilder; +/** + * Interface representing an input dataset with associated facets (metadata). + * + *

Classes implementing this interface provide methods to retrieve builders for both dataset + * facets and input dataset input facets. These facets capture metadata associated with the input + * dataset. + */ public interface InputDatasetWithFacets { + /** + * Returns the {@link DatasetFacetsBuilder} for building dataset facets. + * + *

Dataset facets include general metadata associated with the dataset. + * + * @return the dataset facets builder + */ DatasetFacetsBuilder getDatasetFacetsBuilder(); + /** + * Returns the {@link InputDatasetInputFacetsBuilder} for building input dataset facets. + * + *

Input dataset facets include specific metadata related to the input datasets that are being + * used. + * + * @return the input dataset input facets builder + */ InputDatasetInputFacetsBuilder getInputFacetsBuilder(); } diff --git a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/InputDatasetWithIdentifier.java b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/InputDatasetWithIdentifier.java index cf4f42f623..b748e7b2cf 100644 --- a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/InputDatasetWithIdentifier.java +++ b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/InputDatasetWithIdentifier.java @@ -9,13 +9,27 @@ import io.openlineage.client.utils.DatasetIdentifier; import java.util.Objects; -/** Input dataset with an identifier containing namespace and name */ +/** + * Represents an input dataset with an associated {@link DatasetIdentifier} containing the dataset's + * namespace and name. + * + *

This class provides methods to retrieve the dataset's identifier, as well as builders for the + * dataset's facets and input dataset input facets. It implements both {@link + * InputDatasetWithFacets} and {@link DatasetWithIdentifier}. + */ public class InputDatasetWithIdentifier implements InputDatasetWithFacets, DatasetWithIdentifier { private final DatasetIdentifier datasetIdentifier; private final DatasetFacetsBuilder facetsBuilder; private final InputDatasetInputFacetsBuilder inputFacetsBuilder; + /** + * Constructs a new {@code InputDatasetWithIdentifier}. + * + * @param datasetIdentifier the identifier of the dataset, containing its namespace and name + * @param facetsBuilder a builder for the dataset facets + * @param inputFacetsBuilder a builder for the input dataset input facets + */ public InputDatasetWithIdentifier( DatasetIdentifier datasetIdentifier, DatasetFacetsBuilder facetsBuilder, @@ -25,16 +39,36 @@ public InputDatasetWithIdentifier( this.inputFacetsBuilder = inputFacetsBuilder; } + /** + * Returns the {@link DatasetFacetsBuilder} for building dataset facets. + * + *

Dataset facets include general metadata associated with the dataset. + * + * @return the dataset facets builder + */ @Override public DatasetFacetsBuilder getDatasetFacetsBuilder() { return facetsBuilder; } + /** + * Returns the {@link InputDatasetInputFacetsBuilder} for building input dataset input facets. + * + *

Input dataset facets include specific metadata related to the input datasets that are being + * used like data quality metrics. + * + * @return the input dataset input facets builder + */ @Override public InputDatasetInputFacetsBuilder getInputFacetsBuilder() { return inputFacetsBuilder; } + /** + * Returns the {@link DatasetIdentifier} that contains the dataset's namespace and name. + * + * @return the dataset identifier + */ @Override public DatasetIdentifier getDatasetIdentifier() { return datasetIdentifier; diff --git a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/LineageRelation.java b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/LineageRelation.java index 820db90723..f49a1d631e 100644 --- a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/LineageRelation.java +++ b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/LineageRelation.java @@ -8,11 +8,26 @@ import io.openlineage.client.utils.DatasetIdentifier; /** - * Interface to be implemented for extension's classes extending - * `org.apache.spark.sql.sources.BaseRelation`. Implementing it allows extracting lineage from such - * objects. Implementing `getNamespace` and `getName` within the `DatasetIdentifier` is obligatory. + * Interface to be implemented by extension classes that extend {@code + * org.apache.spark.sql.sources.BaseRelation}. + * + *

Implementing this interface allows for the extraction of lineage information from {@code + * BaseRelation} objects. The methods {@code getNamespace} and {@code getName}, provided by {@link + * DatasetIdentifier}, must be implemented by the classes that implement this interface. This + * identifier must follow the naming conventions outlined in the OpenLineage Naming Specification, ensuring + * consistency across datasets for lineage tracking and data cataloging. */ public interface LineageRelation { + /** + * Returns a {@link DatasetIdentifier} containing the namespace and name of the dataset for + * lineage tracking purposes. + * + * @param sparkListenerEventName the name of the Spark listener event triggering the lineage + * extraction + * @param openLineage an instance of {@link OpenLineage} used for lineage-related operations + * @return a {@link DatasetIdentifier} representing the dataset associated with the event + */ DatasetIdentifier getLineageDatasetIdentifier( String sparkListenerEventName, OpenLineage openLineage); } diff --git a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/LineageRelationProvider.java b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/LineageRelationProvider.java index ecd9841f03..20e64d8abc 100644 --- a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/LineageRelationProvider.java +++ b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/LineageRelationProvider.java @@ -8,17 +8,32 @@ import io.openlineage.client.utils.DatasetIdentifier; /** - * Interface for classes implementing `org.apache.spark.sql.sources.RelationProvider`. - * `RelationProvider` implements `createRelation` with `SQLContext` and `parameters` as arguments. - * We want this package to not depend on Spark's code which may be different across Spark versions. + * Interface for classes implementing {@code org.apache.spark.sql.sources.RelationProvider}. * - *

We're aiming to have arguments of `getLineageDataset` the same as arguments of - * `createRelation` within `RelationProvider`. When implementing this method, one can provide two - * implementations: one with arguments exactly the same as with `RelationProvider`, and other - * throwing an exception which should never be called. + *

The {@code RelationProvider} interface defines the {@code createRelation} method, which takes + * {@code SQLContext} and {@code parameters} as arguments. This interface enables lineage extraction + * from relation providers without directly depending on Spark's code, as the code may vary across + * different Spark versions. + * + *

To align with the {@code createRelation} method, the {@code getLineageDatasetIdentifier} + * method in this interface is designed to accept similar arguments. When implementing this method, + * classes can provide two versions: one that matches the arguments of {@code createRelation}, and + * another that throws an exception if it is ever called, ensuring compatibility across different + * implementations. */ public interface LineageRelationProvider { + /** + * Returns a {@link DatasetIdentifier} containing the namespace and name of the dataset for + * lineage tracking purposes. + * + * @param sparkListenerEventName the name of the Spark listener event triggering the lineage + * extraction + * @param openLineage an instance of {@link OpenLineage} used for lineage-related operations + * @param sqlContext the SQL context, typically used in Spark SQL queries + * @param parameters the parameters used by the relation provider to create the relation + * @return a {@link DatasetIdentifier} representing the dataset associated with the event + */ DatasetIdentifier getLineageDatasetIdentifier( String sparkListenerEventName, OpenLineage openLineage, Object sqlContext, Object parameters); } diff --git a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OlExprId.java b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OlExprId.java index 38e578c3ec..5983cf4d42 100644 --- a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OlExprId.java +++ b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OlExprId.java @@ -7,7 +7,12 @@ import java.util.Objects; /** - * Class to contain reference to Spark's ExprId without adding dependency to Spark library + * A class to hold a reference to Spark's {@code ExprId} without introducing a dependency on the + * Spark library. + * + *

This class serves as a lightweight alternative for storing {@code ExprId}, which is used in + * Spark's expression identifiers, while avoiding direct integration with Spark's internal + * libraries. * * @see namedExpressions.scala @@ -16,10 +21,20 @@ public final class OlExprId { private final Long exprId; + /** + * Constructs a new {@code OlExprId} with the specified expression identifier. + * + * @param exprId the expression identifier + */ public OlExprId(Long exprId) { this.exprId = exprId; } + /** + * Returns the expression identifier. + * + * @return the expression identifier as a {@link Long} + */ public Long getExprId() { return exprId; } diff --git a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OutputDatasetWithDelegate.java b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OutputDatasetWithDelegate.java index 337777d1c1..4061b5054d 100644 --- a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OutputDatasetWithDelegate.java +++ b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OutputDatasetWithDelegate.java @@ -8,13 +8,27 @@ import io.openlineage.client.OpenLineage.OutputDatasetOutputFacetsBuilder; import java.util.Objects; -/** Output dataset with a node in LogicalPlan where a input dataset shall be extracted from */ +/** + * Represents an output dataset associated with a node in a LogicalPlan. This class allows for the + * extraction of metadata (facets) from output datasets. + * + *

It implements both {@link OutputDatasetWithFacets} and {@link DatasetWithDelegate}, providing + * methods to retrieve dataset facets, output dataset facets, and the node from which the dataset is + * extracted. + */ public class OutputDatasetWithDelegate implements OutputDatasetWithFacets, DatasetWithDelegate { private final Object node; private final DatasetFacetsBuilder datasetFacetsBuilder; private final OutputDatasetOutputFacetsBuilder outputFacetsBuilder; + /** + * Constructs a new {@code OutputDatasetWithDelegate}. + * + * @param node the node in the LogicalPlan from which the output dataset is extracted + * @param datasetFacetsBuilder a builder for the dataset facets + * @param outputFacetsBuilder a builder for the output dataset output facets + */ public OutputDatasetWithDelegate( Object node, DatasetFacetsBuilder datasetFacetsBuilder, @@ -24,16 +38,35 @@ public OutputDatasetWithDelegate( this.outputFacetsBuilder = outputFacetsBuilder; } + /** + * Returns the {@link DatasetFacetsBuilder} for building dataset facets. + * + *

Dataset facets include general metadata associated with the dataset. + * + * @return the dataset facets builder + */ @Override public DatasetFacetsBuilder getDatasetFacetsBuilder() { return datasetFacetsBuilder; } + /** + * Returns the {@link OutputDatasetOutputFacetsBuilder} for building output dataset facets. + * + *

Output dataset facets include specific metadata related to the output datasets. + * + * @return the output dataset facets builder + */ @Override public OutputDatasetOutputFacetsBuilder getOutputFacetsBuilder() { return outputFacetsBuilder; } + /** + * Returns the node in the LogicalPlan from which the output dataset is extracted. + * + * @return the LogicalPlan node + */ @Override public Object getNode() { return node; diff --git a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OutputDatasetWithFacets.java b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OutputDatasetWithFacets.java index 425f274b43..27df5984cf 100644 --- a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OutputDatasetWithFacets.java +++ b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OutputDatasetWithFacets.java @@ -7,8 +7,30 @@ import io.openlineage.client.OpenLineage.DatasetFacetsBuilder; import io.openlineage.client.OpenLineage.OutputDatasetOutputFacetsBuilder; +/** + * Interface representing an output dataset with associated facets (metadata). + * + *

Classes implementing this interface provide methods to retrieve builders for both dataset + * facets and output dataset facets. These facets capture metadata associated with the output + * dataset. + */ public interface OutputDatasetWithFacets { + /** + * Returns the {@link DatasetFacetsBuilder} for building dataset facets. + * + *

Dataset facets include general metadata associated with the dataset. + * + * @return the dataset facets builder + */ DatasetFacetsBuilder getDatasetFacetsBuilder(); + /** + * Returns the {@link OutputDatasetOutputFacetsBuilder} for building output dataset facets. + * + *

Output dataset facets include specific metadata related to the output datasets being + * written. + * + * @return the output dataset facets builder + */ OutputDatasetOutputFacetsBuilder getOutputFacetsBuilder(); } diff --git a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OutputDatasetWithIdentifier.java b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OutputDatasetWithIdentifier.java index 54171d5174..c0073289a5 100644 --- a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OutputDatasetWithIdentifier.java +++ b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/OutputDatasetWithIdentifier.java @@ -9,13 +9,27 @@ import io.openlineage.client.utils.DatasetIdentifier; import java.util.Objects; -/** Output dataset with an identifier containing namespace and name */ +/** + * Represents an output dataset associated with an identifier that includes the dataset's namespace + * and name. + * + *

This class provides methods to retrieve the dataset's identifier, as well as builders for the + * dataset's facets and output dataset facets. It implements both {@link OutputDatasetWithFacets} + * and {@link DatasetWithIdentifier}. + */ public class OutputDatasetWithIdentifier implements OutputDatasetWithFacets, DatasetWithIdentifier { private final DatasetIdentifier datasetIdentifier; private final DatasetFacetsBuilder facetsBuilder; private final OutputDatasetOutputFacetsBuilder outputFacetsBuilder; + /** + * Constructs a new {@code OutputDatasetWithIdentifier}. + * + * @param datasetIdentifier the identifier of the dataset, containing its namespace and name + * @param facetsBuilder a builder for the dataset facets + * @param outputFacetsBuilder a builder for the output dataset facets + */ public OutputDatasetWithIdentifier( DatasetIdentifier datasetIdentifier, DatasetFacetsBuilder facetsBuilder, @@ -25,16 +39,36 @@ public OutputDatasetWithIdentifier( this.outputFacetsBuilder = outputFacetsBuilder; } + /** + * Returns the {@link DatasetFacetsBuilder} for building dataset facets. + * + *

Dataset facets include general metadata associated with the dataset. + * + * @return the dataset facets builder + */ @Override public DatasetFacetsBuilder getDatasetFacetsBuilder() { return facetsBuilder; } + /** + * Returns the {@link OutputDatasetOutputFacetsBuilder} for building output dataset facets. + * + *

Output dataset facets include specific metadata related to the output datasets that are + * being written. + * + * @return the output dataset facets builder + */ @Override public OutputDatasetOutputFacetsBuilder getOutputFacetsBuilder() { return outputFacetsBuilder; } + /** + * Returns the {@link DatasetIdentifier} that contains the dataset's namespace and name. + * + * @return the dataset identifier + */ @Override public DatasetIdentifier getDatasetIdentifier() { return datasetIdentifier; diff --git a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/lifecycle/plan/SparkOpenLineageExtensionVisitor.java b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/lifecycle/plan/SparkOpenLineageExtensionVisitor.java index c719af1656..147e6d127e 100644 --- a/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/lifecycle/plan/SparkOpenLineageExtensionVisitor.java +++ b/integration/spark-extension-interfaces/src/main/java/io/openlineage/spark/shade/extension/v1/lifecycle/plan/SparkOpenLineageExtensionVisitor.java @@ -32,13 +32,26 @@ import java.util.stream.Collectors; /** - * This class serves as a container that wraps all the interface method calls exposed by this - * package. The openlineage-spark accesses these wrapper methods through reflection. + * This class serves as a visitor that wraps method calls for handling input and output lineage in + * Spark jobs, as defined in the OpenLineage-Spark extension. + * + *

The OpenLineage-Spark library uses reflection to access these wrapper methods for extracting + * lineage information from Spark's LogicalPlan and other relevant components. The visitor class + * handles different types of lineage nodes, such as {@link InputLineageNode} and {@link + * OutputLineageNode}, and allows conversion to a format suitable for lineage tracking. */ public final class SparkOpenLineageExtensionVisitor { private static final ObjectMapper mapper = OpenLineageClientUtils.newObjectMapper(); private final OpenLineage openLineage = new OpenLineage(Versions.OPEN_LINEAGE_PRODUCER_URI); + /** + * Determines if the given {@code lineageNode} is of a type that this visitor can process. + * Specifically, it checks if the object is an instance of {@link LineageRelationProvider}, {@link + * LineageRelation}, {@link InputLineageNode}, or {@link OutputLineageNode}. + * + * @param lineageNode the node representing a lineage component + * @return {@code true} if the node is of a supported type, {@code false} otherwise + */ public boolean isDefinedAt(Object lineageNode) { return lineageNode instanceof LineageRelationProvider || lineageNode instanceof LineageRelation @@ -46,6 +59,16 @@ public boolean isDefinedAt(Object lineageNode) { || lineageNode instanceof OutputLineageNode; } + /** + * Applies the visitor to a {@link LineageRelationProvider}, extracting lineage information such + * as the {@link DatasetIdentifier} from the provided {@code lineageNode}. + * + * @param lineageNode the lineage node to process + * @param sparkListenerEventName the name of the Spark listener event + * @param sqlContext the SQL context of the current Spark execution + * @param parameters additional parameters relevant to the lineage extraction + * @return a map containing lineage information in a serialized format + */ public Map apply( Object lineageNode, String sparkListenerEventName, Object sqlContext, Object parameters) { if (lineageNode instanceof LineageRelationProvider) { @@ -58,6 +81,14 @@ public Map apply( return Collections.emptyMap(); } + /** + * Applies the visitor to a {@link LineageRelation}, {@link InputLineageNode}, or {@link + * OutputLineageNode}, extracting and serializing the relevant lineage information. + * + * @param lineageNode the lineage node to process + * @param sparkListenerEventName the name of the Spark listener event + * @return a map containing the serialized lineage data + */ public Map apply(Object lineageNode, String sparkListenerEventName) { if (lineageNode instanceof LineageRelation) { LineageRelation lineageRelation = (LineageRelation) lineageNode; @@ -143,6 +174,7 @@ private static Map buildMapWithDatasetsAndDelegates( return map; } + /** Utility class to handle versioning for the OpenLineage producer URI. */ private static class Versions { public static final URI OPEN_LINEAGE_PRODUCER_URI = getProducerUri(); @@ -153,6 +185,11 @@ private static URI getProducerUri() { getVersion())); } + /** + * Retrieves the version information from a properties file. 
+ * + * @return the version string, or "main" if the version cannot be determined + */ @SuppressWarnings("PMD") public static String getVersion() { try { From e491670f2dbe97225173a0df271ac7331695b481 Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Wed, 23 Oct 2024 17:50:55 +0200 Subject: [PATCH 24/89] Fix Databricks integration running (#3178) Signed-off-by: Artur Owczarek --- .circleci/continue_config.yml | 2 +- integration/spark/app/build.gradle | 35 +++++++------------ .../agent/DatabricksDynamicParameter.java | 2 +- 3 files changed, 15 insertions(+), 24 deletions(-) diff --git a/.circleci/continue_config.yml b/.circleci/continue_config.yml index c91f47d629..4eee41d540 100644 --- a/.circleci/continue_config.yml +++ b/.circleci/continue_config.yml @@ -876,7 +876,7 @@ jobs: sudo update-alternatives --set java ${JAVA_BIN} sudo update-alternatives --set javac ${JAVAC_BIN} - run: ./gradlew --console=plain shadowJar -x test -Pjava.compile.home=${JAVA17_HOME} - - run: ./gradlew --no-daemon --console=plain databricksIntegrationTest -x test -Pspark.version=<< parameters.spark-version >> -Popenlineage.tests.databricks.host=$DATABRICKS_HOST -Popenlineage.tests.databricks.token=$DATABRICKS_TOKEN -Pjava.compile.home=${JAVA17_HOME} + - run: ./gradlew --no-daemon --console=plain databricksIntegrationTest -x test -Pspark.version=<< parameters.spark-version >> -Dopenlineage.tests.databricks.host=$DATABRICKS_HOST -Dopenlineage.tests.databricks.token=$DATABRICKS_TOKEN -Pjava.compile.home=${JAVA17_HOME} - store_test_results: path: app/build/test-results/databricksIntegrationTest - store_artifacts: diff --git a/integration/spark/app/build.gradle b/integration/spark/app/build.gradle index 6332c7d773..d0233cb997 100644 --- a/integration/spark/app/build.gradle +++ b/integration/spark/app/build.gradle @@ -30,9 +30,9 @@ ext { postgresqlVersion = '42.7.4' testcontainersVersion = '1.20.2' configurableTestConfig = [ - sparkConfFile: project.findProperty('spark.conf.file') ?: System.getProperty('spark.conf.file'), - hostDir: project.findProperty('host.dir') ?: System.getProperty('host.dir'), - testDir: project.findProperty('test.dir') ?: System.getProperty('test.dir') + sparkConfFile: project.findProperty('spark.conf.file') ?: System.getProperty('spark.conf.file'), + hostDir : project.findProperty('host.dir') ?: System.getProperty('host.dir'), + testDir : project.findProperty('test.dir') ?: System.getProperty('test.dir') ] micrometerVersion = '1.13.5' } @@ -172,8 +172,17 @@ final def testResourcesDir = buildDirectory.dir("resources/test") final def libsShadowDir = buildDirectory.dir("libs/shadow") final def scalaFixturesJarName = "openlineage-spark-scala-fixtures_${scala}-${project.version}.jar" +/** + The "test" task runs JUnit tests in a separate JVM, and by default, system properties (-D parameters) + are not forwarded to it. + Here we selectively create and pass properties that are used in the tests. + Apart from special technical properties, we pass all the system properties that start with "openlineage". + They are commonly used in {@link io.openlineage.spark.agent.DynamicParameter} interface implementations + to accept parameters. 
+ */ def testSystemProperties = { String spark, String scala -> - [ + Map openLineageSystemProperties = System.getProperties().findAll { key, value -> key.toString().startsWith("openlineage") } + openLineageSystemProperties + [ "additional.conf.dir" : destAdditionalConfDir.get().asFile.absolutePath, "additional.jars.dir" : destAdditionalJarsDir.get().asFile.absolutePath, "build.dir" : buildDirectory.get().asFile.absolutePath, @@ -406,23 +415,8 @@ tasks.register("databricksIntegrationTest", Test) { useJUnitPlatform { includeTags("databricks") } - options { - systemProperties = openLineageSystemProperties() - } } -/** - The "test" task runs JUnit tests in a separate JVM, and by default, system properties (-D parameters) - are not forwarded to it. Gradle allows forwarding them with -P parameter, but not everyone is aware of that. - Here, we selectively pass only properties that start with "openlineage" to avoid conflicts with - environment-related properties (e.g., current working directory) that could affect resource discovery. - The properties are later used {@link io.openlineage.spark.agent.DynamicParameter} - */ -private Map openLineageSystemProperties() { - System.getProperties().findAll { key, value -> key.toString().startsWith("openlineage") } -} - - tasks.register("awsIntegrationTest", Test) { group = "verification" dependsOn(integrationTestDependencies) @@ -431,9 +425,6 @@ tasks.register("awsIntegrationTest", Test) { useJUnitPlatform { includeTags("aws") } - options { - systemProperties = openLineageSystemProperties() - } testLogging { exceptionFormat "full" } diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksDynamicParameter.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksDynamicParameter.java index 14de73fa30..0db8b32f41 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksDynamicParameter.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksDynamicParameter.java @@ -42,7 +42,7 @@ public enum DatabricksDynamicParameter implements DynamicParameter { // CLUSTER PARAMETERS /** The Spark version as provided by Gradle. This case is not using the openlineage prefix. 
*/ - SparkVersion("spark.version", null, "3.5.2"); + SparkVersion("spark.version", null, null); private final String parameterName; private final String defaultValue; From a967c48955922b8cdb30917c94b826dbd91af275 Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Wed, 23 Oct 2024 17:54:12 +0200 Subject: [PATCH 25/89] refactor: Few changes in OpenLineageSparkListener (#3179) Signed-off-by: Artur Owczarek --- .../spark/agent/OpenLineageSparkListener.java | 34 +++++++++++-------- 1 file changed, 20 insertions(+), 14 deletions(-) diff --git a/integration/spark/app/src/main/java/io/openlineage/spark/agent/OpenLineageSparkListener.java b/integration/spark/app/src/main/java/io/openlineage/spark/agent/OpenLineageSparkListener.java index 6ec14073d4..17405f6779 100644 --- a/integration/spark/app/src/main/java/io/openlineage/spark/agent/OpenLineageSparkListener.java +++ b/integration/spark/app/src/main/java/io/openlineage/spark/agent/OpenLineageSparkListener.java @@ -62,9 +62,9 @@ public class OpenLineageSparkListener extends org.apache.spark.scheduler.SparkLi Collections.synchronizedMap(new HashMap<>()); private static final Map rddExecutionRegistry = Collections.synchronizedMap(new HashMap<>()); - private static WeakHashMap, Configuration> outputs = new WeakHashMap<>(); + private static final WeakHashMap, Configuration> outputs = new WeakHashMap<>(); private static ContextFactory contextFactory; - private static JobMetricsHolder jobMetrics = JobMetricsHolder.getInstance(); + private static final JobMetricsHolder jobMetrics = JobMetricsHolder.getInstance(); private static final Function1 sparkContextFromSession = ScalaConversionUtils.toScalaFn(SparkSession::sparkContext); private static final Function0> activeSparkContext = @@ -74,7 +74,7 @@ public class OpenLineageSparkListener extends org.apache.spark.scheduler.SparkLi private static MeterRegistry meterRegistry; - private static String sparkVersion = package$.MODULE$.SPARK_VERSION(); + private static final String sparkVersion = package$.MODULE$.SPARK_VERSION(); private final boolean isDisabled = checkIfDisabled(); @@ -101,10 +101,13 @@ public void onOtherEvent(SparkListenerEvent event) { if (isDisabled) { return; } - initializeContextFactoryIfNotInitialized(); if (event instanceof SparkListenerSQLExecutionStart) { + initializeContextFactoryIfNotInitialized(); + log.debug("onOtherEvent called with event type SparkListenerSQLExecutionStart: [{}].", event); sparkSQLExecStart((SparkListenerSQLExecutionStart) event); } else if (event instanceof SparkListenerSQLExecutionEnd) { + initializeContextFactoryIfNotInitialized(); + log.debug("onOtherEvent called with event type SparkListenerSQLExecutionEnd: [{}].", event); sparkSQLExecEnd((SparkListenerSQLExecutionEnd) event); } } @@ -117,7 +120,7 @@ private void sparkSQLExecStart(SparkListenerSQLExecutionStart startEvent) { meterRegistry.counter("openlineage.spark.event.sql.start").increment(); circuitBreaker.run( () -> { - activeJobId.ifPresent(id -> context.setActiveJobId(id)); + activeJobId.ifPresent(context::setActiveJobId); context.start(startEvent); return null; }); @@ -132,7 +135,7 @@ private void sparkSQLExecEnd(SparkListenerSQLExecutionEnd endEvent) { if (context != null) { circuitBreaker.run( () -> { - activeJobId.ifPresent(id -> context.setActiveJobId(id)); + activeJobId.ifPresent(context::setActiveJobId); context.end(endEvent); return null; }); @@ -143,7 +146,7 @@ private void sparkSQLExecEnd(SparkListenerSQLExecutionEnd endEvent) { c -> circuitBreaker.run( () -> { - activeJobId.ifPresent(id -> 
c.setActiveJobId(id)); + activeJobId.ifPresent(c::setActiveJobId); c.end(endEvent); return null; })); @@ -156,8 +159,8 @@ public void onJobStart(SparkListenerJobStart jobStart) { if (isDisabled) { return; } + log.debug("onJobStart called [{}].", jobStart); activeJobId = Optional.of(jobStart.jobId()); - log.debug("onJobStart called {}", jobStart); initializeContextFactoryIfNotInitialized(); meterRegistry.counter("openlineage.spark.event.job.start").increment(); Optional activeJob = @@ -218,6 +221,7 @@ public void onJobEnd(SparkListenerJobEnd jobEnd) { if (isDisabled) { return; } + log.debug("onJobEnd called [{}].", jobEnd); ExecutionContext context = rddExecutionRegistry.remove(jobEnd.jobId()); meterRegistry.counter("openlineage.spark.event.job.end").increment(); circuitBreaker.run( @@ -237,11 +241,11 @@ public void onTaskEnd(SparkListenerTaskEnd taskEnd) { if (isDisabled || sparkVersion.startsWith("2")) { return; } - log.debug("onTaskEnd {}", taskEnd); + log.debug("onTaskEnd called [{}].", taskEnd); jobMetrics.addMetrics(taskEnd.stageId(), taskEnd.taskMetrics()); } - public static ExecutionContext getSparkApplicationExecutionContext() { + private static ExecutionContext getSparkApplicationExecutionContext() { Optional sparkContext = asJavaOptional( SparkSession.getDefaultSession() @@ -250,20 +254,20 @@ public static ExecutionContext getSparkApplicationExecutionContext() { return contextFactory.createSparkApplicationExecutionContext(sparkContext.orElse(null)); } - public static Optional getSparkSQLExecutionContext(long executionId) { + private static Optional getSparkSQLExecutionContext(long executionId) { return Optional.ofNullable( sparkSqlExecutionRegistry.computeIfAbsent( executionId, (e) -> contextFactory.createSparkSQLExecutionContext(executionId).orElse(null))); } - public static Optional getExecutionContext(int jobId) { + private static Optional getExecutionContext(int jobId) { return Optional.ofNullable( rddExecutionRegistry.computeIfAbsent( jobId, (e) -> contextFactory.createRddExecutionContext(jobId))); } - public static Optional getExecutionContext(int jobId, long executionId) { + private static Optional getExecutionContext(int jobId, long executionId) { Optional executionContext = getSparkSQLExecutionContext(executionId); executionContext.ifPresent(context -> rddExecutionRegistry.put(jobId, context)); return executionContext; @@ -284,6 +288,7 @@ public void onApplicationEnd(SparkListenerApplicationEnd applicationEnd) { if (isDisabled) { return; } + log.debug("onApplicationEnd called [{}].", applicationEnd); meterRegistry.counter("openlineage.spark.event.app.end").increment(); meterRegistry .counter("openlineage.spark.event.app.end.memoryusage") @@ -308,6 +313,7 @@ public void onApplicationStart(SparkListenerApplicationStart applicationStart) { if (isDisabled) { return; } + log.debug("onApplicationStart called [{}].", applicationStart); initializeContextFactoryIfNotInitialized(applicationStart.appName()); meterRegistry.counter("openlineage.spark.event.app.start").increment(); meterRegistry @@ -358,7 +364,7 @@ private void initializeContextFactoryIfNotInitialized(SparkConf sparkConf, Strin } } - private static void initializeMetrics(OpenLineageConfig openLineageConfig) { + private static void initializeMetrics(OpenLineageConfig openLineageConfig) { meterRegistry = MicrometerProvider.addMeterRegistryFromConfig(openLineageConfig.getMetricsConfig()); String disabledFacets; From 7f0fc079891d95edcfba1d71284b5d8101307660 Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Thu, 24 
Oct 2024 13:17:29 +0200 Subject: [PATCH 26/89] ci: use GH label to filter out jobs (#3045) Signed-off-by: Maciej Obuchowski --- .circleci/config.yml | 32 +++------ .circleci/continue_config.yml | 81 ++++++++++------------- .circleci/workflows/openlineage-spark.yml | 49 +++++++------- dev/filter_approvals.py | 34 ++++++++++ dev/filter_matrix.py | 26 ++++++++ 5 files changed, 132 insertions(+), 90 deletions(-) create mode 100644 dev/filter_approvals.py create mode 100644 dev/filter_matrix.py diff --git a/.circleci/config.yml b/.circleci/config.yml index f4b8943ea3..dcb4ff3089 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -6,6 +6,7 @@ setup: true # the path of an updated fileset orbs: continuation: circleci/continuation@1.0.0 + github-cli: circleci/github-cli@2.4.0 # optional parameter when triggering to # only run a particular type of integration @@ -51,6 +52,7 @@ jobs: - image: cimg/python:3.8 steps: - checkout + - github-cli/setup - run: name: Install yq command: | @@ -149,27 +151,14 @@ jobs: - run: name: Remove approval steps if not pull from forks. command: | - pip install pyyaml==6.0.1 - python -c "import yaml - d = yaml.safe_load(open('complete_config.yml')) - for workflow_name, workflow_definition in d['workflows'].items(): - jobs = workflow_definition.get('jobs') if isinstance(workflow_definition, dict) else None - if not jobs: continue - - # find all approvals - approvals = list(filter(lambda x: isinstance(x, dict) and list(x.values())[0].get('type') == 'approval', jobs)) - for approval in approvals: - approval_name = next(iter(approval)) - approval_upstreams = approval[approval_name].get('requires') - approval_downstream = list(filter(lambda x: isinstance(x, dict) and approval_name in list(x.values())[0].get('requires', ''), jobs)) - # replace approval with its upstream jobs - for job in approval_downstream: - requires = next(iter(job.values()))['requires'] - requires.remove(approval_name) - requires.extend(approval_upstreams) - jobs.remove(approval) - with open('complete_config.yml', 'w') as f: - f.write(yaml.dump(d, sort_keys=False))" + pip install pyyaml==6.0.1 + python dev/filter_approvals.py + - run: | + export IS_FULL_TESTS=$(gh pr view --json labels | jq 'any(.currentBranch.labels[]; .name == "full-tests")') + echo $IS_FULL_TESTS + if [ -z "$IS_FULL_TESTS" ] || [ "$IS_FULL_TESTS" == "0" ]; then + python dev/filter_matrix.py + fi - when: condition: or: @@ -194,6 +183,7 @@ workflows: schedule_workflow: jobs: - determine_changed_modules: + context: pr filters: tags: only: /^[0-9]+(\.[0-9]+){2}(-rc\.[0-9]+)?$/ diff --git a/.circleci/continue_config.yml b/.circleci/continue_config.yml index 4eee41d540..61aea0af1a 100644 --- a/.circleci/continue_config.yml +++ b/.circleci/continue_config.yml @@ -75,7 +75,32 @@ commands: echo "Setting default Java to ${JAVA_BIN}" sudo update-alternatives --set java ${JAVA_BIN} sudo update-alternatives --set javac ${JAVAC_BIN} - + set_java_spark_scala_version: + parameters: + env-variant: + type: string + description: "Set Java, Spark and Scala versions" + steps: + - run: | + set -eux + JAVA=$(echo << parameters.env-variant >> | cut -d '-' -f 1 | cut -d ':' -f 2) + SPARK=$(echo << parameters.env-variant >> | cut -d '-' -f 2 | cut -d ':' -f 2) + SCALA=$(echo << parameters.env-variant >> | cut -d '-' -f 3 | cut -d ':' -f 2) + echo spark=$SPARK java=$JAVA scala=$SCALA + JAVA8_HOME='/usr/lib/jvm/java-8-openjdk-amd64' + JAVA17_HOME='/usr/lib/jvm/java-17-openjdk-amd64' + JAVA_BIN=$([ "$JAVA" = "17" ] && echo "$JAVA17_HOME/bin/java" || 
echo "$JAVA8_HOME/jre/bin/java") + JAVAC_BIN=$([ "$JAVA" = "17" ] && echo "$JAVA17_HOME/bin/javac" || echo "$JAVA8_HOME/bin/javac") + + echo 'export JAVA17_HOME="/usr/lib/jvm/java-17-openjdk-amd64"' >> "$BASH_ENV" + echo "export SPARK=\"${SPARK}\"" >> "$BASH_ENV" + echo "export JAVA=\"${JAVA}\"" >> "$BASH_ENV" + echo "export JAVA_BIN=\"${JAVA_BIN}\"" >> "$BASH_ENV" + echo "export JAVAC_BIN=\"${JAVAC_BIN}\"" >> "$BASH_ENV" + echo "export SCALA=\"${SCALA}\"" >> "$BASH_ENV" + echo "Setting default Java to ${JAVA_BIN}" + sudo update-alternatives --set java ${JAVA_BIN} + sudo update-alternatives --set javac ${JAVAC_BIN} store_submodule_tests: parameters: submodule: @@ -620,24 +645,8 @@ jobs: command: ./../../.circleci/checksum.sh /tmp/checksum.txt $CIRCLE_BRANCH - attach_workspace: at: ~/ - - run: - name: Spark & Java version Variable - command: | - JAVA=$(echo << parameters.env-variant >> | cut -d '-' -f 1 | cut -d ':' -f 2) - SPARK=$(echo << parameters.env-variant >> | cut -d '-' -f 2 | cut -d ':' -f 2) - SCALA=$(echo << parameters.env-variant >> | cut -d '-' -f 3 | cut -d ':' -f 2) - echo spark=$SPARK java=$JAVA scala=$SCALA - JAVA8_HOME='/usr/lib/jvm/java-8-openjdk-amd64' - JAVA17_HOME='/usr/lib/jvm/java-17-openjdk-amd64' - - echo 'export JAVA17_HOME=/usr/lib/jvm/java-17-openjdk-amd64' >> "$BASH_ENV" - echo 'export SPARK='${SPARK} >> "$BASH_ENV" - echo 'export JAVA_BIN='$([ "$JAVA" = "17" ] && echo "$JAVA17_HOME/bin/java" || echo "$JAVA8_HOME/jre/bin/java") >> "$BASH_ENV" - echo 'export JAVAC_BIN='$([ "$JAVA" = "17" ] && echo "$JAVA17_HOME/bin/javac" || echo "$JAVA8_HOME/bin/javac") >> "$BASH_ENV" - echo 'export SCALA='${SCALA} >> "$BASH_ENV" - echo "${JAVA}" - echo "${JAVA_BIN}" - echo "${JAVAC_BIN}" + - set_java_spark_scala_version: + env-variant: << parameters.env-variant >> - restore_cache: keys: - v1-integration-spark-{{ checksum "/tmp/checksum.txt" }} @@ -689,33 +698,15 @@ jobs: - run: name: Generate cache key command: ./../../.circleci/checksum.sh /tmp/checksum.txt $CIRCLE_BRANCH - - run: - name: Spark & Java version Variable - command: | - JAVA=$(echo << parameters.env-variant >> | cut -d '-' -f 1 | cut -d ':' -f 2) - SPARK=$(echo << parameters.env-variant >> | cut -d '-' -f 2 | cut -d ':' -f 2) - SCALA=$(echo << parameters.env-variant >> | cut -d '-' -f 3 | cut -d ':' -f 2) - echo spark=$SPARK java=$JAVA scala=$SCALA - JAVA8_HOME='/usr/lib/jvm/java-8-openjdk-amd64' - JAVA17_HOME='/usr/lib/jvm/java-17-openjdk-amd64' - - echo 'export JAVA17_HOME=/usr/lib/jvm/java-17-openjdk-amd64' >> "$BASH_ENV" - echo 'export SPARK_VERSION_VAR='${SPARK} >> "$BASH_ENV" - echo 'export SCALA='${SCALA} >> "$BASH_ENV" - echo 'export JAVA_BIN='$([ "$JAVA" = "17" ] && echo "$JAVA17_HOME/bin/java" || echo "$JAVA8_HOME/jre/bin/java") >> "$BASH_ENV" - echo 'export JAVAC_BIN='$([ "$JAVA" = "17" ] && echo "$JAVA17_HOME/bin/javac" || echo "$JAVA8_HOME/bin/javac") >> "$BASH_ENV" - echo $JAVA_BIN + - set_java_spark_scala_version: + env-variant: << parameters.env-variant >> - run: mkdir -p app/build/gcloud && echo $GCLOUD_SERVICE_KEY > app/build/gcloud/gcloud-service-key.json && chmod 644 app/build/gcloud/gcloud-service-key.json - restore_cache: keys: - v1-integration-spark-{{ checksum "/tmp/checksum.txt" }} - attach_workspace: at: ~/ - - run: | - echo "Setting default Java to ${JAVA_BIN}" - sudo update-alternatives --set java ${JAVA_BIN} - sudo update-alternatives --set javac ${JAVAC_BIN} - - run: ./gradlew --no-daemon --console=plain integrationTest -x test -Pspark.version=${SPARK_VERSION_VAR} 
-Pscala.binary.version=${SCALA} -Pjava.compile.home=${JAVA17_HOME} + - run: ./gradlew --no-daemon --console=plain integrationTest -x test -Pspark.version=${SPARK} -Pscala.binary.version=${SCALA} -Pjava.compile.home=${JAVA17_HOME} - run: ./gradlew --no-daemon --console=plain jacocoTestReport -Pscala.binary.version=${SCALA} -Pjava.compile.home=${JAVA17_HOME} - store_test_results: path: app/build/test-results/integrationTest @@ -846,7 +837,7 @@ jobs: integration-test-databricks-integration-spark: parameters: - spark-version: + env-variant: type: string working_directory: ~/openlineage/integration/spark machine: @@ -871,12 +862,10 @@ jobs: - v1-integration-spark-{{ checksum "/tmp/checksum.txt" }} - attach_workspace: at: ~/ - - set_java_version - - run: | - sudo update-alternatives --set java ${JAVA_BIN} - sudo update-alternatives --set javac ${JAVAC_BIN} + - set_java_spark_scala_version: + env-variant: << parameters.env-variant >> - run: ./gradlew --console=plain shadowJar -x test -Pjava.compile.home=${JAVA17_HOME} - - run: ./gradlew --no-daemon --console=plain databricksIntegrationTest -x test -Pspark.version=<< parameters.spark-version >> -Dopenlineage.tests.databricks.host=$DATABRICKS_HOST -Dopenlineage.tests.databricks.token=$DATABRICKS_TOKEN -Pjava.compile.home=${JAVA17_HOME} + - run: ./gradlew --no-daemon --console=plain databricksIntegrationTest -x test -Pspark.version=${SPARK} -Pscala.binary.version=${SCALA} -Pjava.compile.home=${JAVA17_HOME} -PdatabricksHost=${DATABRICKS_HOST} -PdatabricksToken=${DATABRICKS_TOKEN} - store_test_results: path: app/build/test-results/databricksIntegrationTest - store_artifacts: diff --git a/.circleci/workflows/openlineage-spark.yml b/.circleci/workflows/openlineage-spark.yml index e06442329c..03feeea280 100644 --- a/.circleci/workflows/openlineage-spark.yml +++ b/.circleci/workflows/openlineage-spark.yml @@ -23,17 +23,17 @@ workflows: parameters: env-variant: [ 'java:8-spark:2.4.8-scala:2.12', - 'java:8-spark:3.2.4-scala:2.12', - 'java:8-spark:3.2.4-scala:2.13', - 'java:8-spark:3.3.4-scala:2.12', - 'java:8-spark:3.3.4-scala:2.13', - 'java:17-spark:3.3.4-scala:2.12', - 'java:17-spark:3.3.4-scala:2.13', - 'java:8-spark:3.4.3-scala:2.12', - 'java:8-spark:3.4.3-scala:2.13', - 'java:8-spark:3.5.2-scala:2.12', - 'java:8-spark:3.5.2-scala:2.13', - 'java:17-spark:3.5.2-scala:2.12', + 'java:8-spark:3.2.4-scala:2.12-full-tests', + 'java:8-spark:3.2.4-scala:2.13-full-tests', + 'java:8-spark:3.3.4-scala:2.12-full-tests', + 'java:8-spark:3.3.4-scala:2.13-full-tests', + 'java:17-spark:3.3.4-scala:2.12-full-tests', + 'java:17-spark:3.3.4-scala:2.13-full-tests', + 'java:8-spark:3.4.3-scala:2.12-full-tests', + 'java:8-spark:3.4.3-scala:2.13-full-tests', + 'java:8-spark:3.5.2-scala:2.12-full-tests', + 'java:8-spark:3.5.2-scala:2.13-full-tests', + 'java:17-spark:3.5.2-scala:2.12-full-tests', 'java:17-spark:3.5.2-scala:2.13', 'java:17-spark:4.0.0-scala:2.13' ] @@ -92,7 +92,10 @@ workflows: context: integration-tests matrix: parameters: - spark-version: [ '3.4.2', '3.5.2' ] + env-variant: [ + 'java:8-spark:3.4.3-scala:2.12-full-tests', + 'java:17-spark:3.5.2-scala:2.13-full-tests' + ] requires: - approval-integration-spark post-steps: @@ -112,17 +115,17 @@ workflows: parameters: env-variant: [ 'java:8-spark:2.4.8-scala:2.12', - 'java:8-spark:3.2.4-scala:2.12', - 'java:8-spark:3.2.4-scala:2.13', - 'java:8-spark:3.3.4-scala:2.12', - 'java:8-spark:3.3.4-scala:2.13', - 'java:17-spark:3.3.4-scala:2.12', - 'java:17-spark:3.3.4-scala:2.13', - 'java:8-spark:3.4.3-scala:2.12', - 
'java:8-spark:3.4.3-scala:2.13', - 'java:8-spark:3.5.2-scala:2.12', - 'java:8-spark:3.5.2-scala:2.13', - 'java:17-spark:3.5.2-scala:2.12', + 'java:8-spark:3.2.4-scala:2.12-full-tests', + 'java:8-spark:3.2.4-scala:2.13-full-tests', + 'java:8-spark:3.3.4-scala:2.12-full-tests', + 'java:8-spark:3.3.4-scala:2.13-full-tests', + 'java:17-spark:3.3.4-scala:2.12-full-tests', + 'java:17-spark:3.3.4-scala:2.13-full-tests', + 'java:8-spark:3.4.3-scala:2.12-full-tests', + 'java:8-spark:3.4.3-scala:2.13-full-tests', + 'java:8-spark:3.5.2-scala:2.12-full-tests', + 'java:8-spark:3.5.2-scala:2.13-full-tests', + 'java:17-spark:3.5.2-scala:2.12-full-tests', 'java:17-spark:3.5.2-scala:2.13', 'java:17-spark:4.0.0-scala:2.13' ] diff --git a/dev/filter_approvals.py b/dev/filter_approvals.py new file mode 100644 index 0000000000..c47754bb47 --- /dev/null +++ b/dev/filter_approvals.py @@ -0,0 +1,34 @@ +# Copyright 2018-2024 contributors to the OpenLineage project +# SPDX-License-Identifier: Apache-2.0 + +import yaml + +with open("complete_config.yml") as f: + d = yaml.safe_load(f) + +for _, workflow_definition in d["workflows"].items(): + jobs = workflow_definition.get("jobs") if isinstance(workflow_definition, dict) else None + if not jobs: + continue + + # find all approvals + approvals = list( + filter(lambda x: isinstance(x, dict) and next(iter(x.values())).get("type") == "approval", jobs) + ) + for approval in approvals: + approval_name = next(iter(approval)) + approval_upstreams = approval[approval_name].get("requires") + approval_downstream = list( + filter( + lambda x: isinstance(x, dict) and approval_name in next(iter(x.values())).get("requires", ""), + jobs, + ) + ) + # replace approval with its upstream jobs + for job in approval_downstream: + requires = next(iter(job.values()))["requires"] + requires.remove(approval_name) + requires.extend(approval_upstreams) + jobs.remove(approval) +with open("complete_config.yml", "w") as f: + f.write(yaml.dump(d, sort_keys=False)) diff --git a/dev/filter_matrix.py b/dev/filter_matrix.py new file mode 100644 index 0000000000..2183a75b4a --- /dev/null +++ b/dev/filter_matrix.py @@ -0,0 +1,26 @@ +# Copyright 2018-2024 contributors to the OpenLineage project +# SPDX-License-Identifier: Apache-2.0 + +import yaml + +with open("complete_config.yml") as f: + d = yaml.safe_load(f) + +for _, workflow_definition in d["workflows"].items(): + jobs = workflow_definition.get("jobs") if isinstance(workflow_definition, dict) else None + if not jobs: + continue + + for job in jobs: + if "test-integration-spark" in job: + test_job = job["test-integration-spark"] + elif "integration-test-integration-spark" in job: + integration_test_job = job["integration-test-integration-spark"] + + for job in filter(None, [test_job, integration_test_job]): + variants = [ + x for x in test_job.get("matrix").get("parameters").get("env-variant") if "full-tests" not in x + ] + job["matrix"]["parameters"]["env-variant"] = variants +with open("complete_config.yml", "w") as f: + f.write(yaml.dump(d, sort_keys=False)) From 76a8948d0bd80517e60a851dae1599eee5538aea Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Fri, 25 Oct 2024 08:29:31 +0200 Subject: [PATCH 27/89] local file system should not point to IP (#3159) Signed-off-by: Maciej Obuchowski --- spec/Naming.md | 15 +++++++++++++++ website/docs/spec/naming.md | 3 ++- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/spec/Naming.md b/spec/Naming.md index 1c014e393f..39d2679d36 100644 --- a/spec/Naming.md +++ b/spec/Naming.md @@ -1,5 +1,7 
@@ # Naming +## This document is obsolete, please look at the [OpenLineage Naming Conventions](https://openlineage.io/docs/spec/naming) + We define the unique name strategy per resource to ensure it is followed uniformly independently of who is producing metadata, so we can connect lineage from various sources. @@ -381,6 +383,19 @@ Identifier : ### Local file system +Naming hierarchy: + +- Path + +Identifier : + +- Namespace: file + - Scheme = file +- Unique name: {path} + - URI = file://{path} + +### Remote file system + Datasource hierarchy: - IP diff --git a/website/docs/spec/naming.md b/website/docs/spec/naming.md index c491d8fed6..ecd48fcef6 100644 --- a/website/docs/spec/naming.md +++ b/website/docs/spec/naming.md @@ -33,7 +33,8 @@ A dataset, or `table`, is organized according to a producer, namespace, database | GCS | Blob storage | gs://{bucket name} | {object key} | | HDFS | Distributed file system | hdfs://{namenode host}:{namenode port} | {path} | | Kafka | distributed event streaming platform | kafka://{bootstrap server host}:{port} | {topic} | -| Local file system | File system | file://{host} | {path} | +| Local file system | File system | file | {path} | +| Remote file system | File system | file://{host} | {path} | | S3 | Blob Storage | s3://{bucket name} | {object key} | | WASBS (Azure Blob Storage) | Blob Storage | wasbs://{container name}@{service name}.dfs.core.windows.net | {object key} | From b6b4cbafb25ea3cb32989e02050cd3bc1f4fccad Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 08:26:59 +0100 Subject: [PATCH 28/89] Bump the client-java group in /client/java with 4 updates (#3188) Bumps the client-java group in /client/java with 4 updates: [org.junit.jupiter:junit-jupiter](https://github.com/junit-team/junit5), [com.google.cloud:google-cloud-storage](https://github.com/googleapis/java-storage), software.amazon.awssdk:bom and [org.testcontainers:testcontainers-bom](https://github.com/testcontainers/testcontainers-java). 
Updates `org.junit.jupiter:junit-jupiter` from 5.11.2 to 5.11.3 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.11.2...r5.11.3) Updates `com.google.cloud:google-cloud-storage` from 2.43.2 to 2.44.1 - [Release notes](https://github.com/googleapis/java-storage/releases) - [Changelog](https://github.com/googleapis/java-storage/blob/main/CHANGELOG.md) - [Commits](https://github.com/googleapis/java-storage/compare/v2.43.2...v2.44.1) Updates `software.amazon.awssdk:bom` from 2.28.26 to 2.29.1 Updates `org.testcontainers:testcontainers-bom` from 1.20.2 to 1.20.3 - [Release notes](https://github.com/testcontainers/testcontainers-java/releases) - [Changelog](https://github.com/testcontainers/testcontainers-java/blob/main/CHANGELOG.md) - [Commits](https://github.com/testcontainers/testcontainers-java/compare/1.20.2...1.20.3) --- updated-dependencies: - dependency-name: org.junit.jupiter:junit-jupiter dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: com.google.cloud:google-cloud-storage dependency-type: direct:production update-type: version-update:semver-minor dependency-group: client-java - dependency-name: software.amazon.awssdk:bom dependency-type: direct:production update-type: version-update:semver-minor dependency-group: client-java - dependency-name: org.testcontainers:testcontainers-bom dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- client/java/build.gradle | 2 +- client/java/transports-gcs/build.gradle | 2 +- client/java/transports-s3/build.gradle | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/client/java/build.gradle b/client/java/build.gradle index 777be11e79..5e1b0c16d4 100644 --- a/client/java/build.gradle +++ b/client/java/build.gradle @@ -48,7 +48,7 @@ repositories { ext { assertjVersion = '3.26.3' jacksonVersion = "2.15.3" - junit5Version = '5.11.2' + junit5Version = '5.11.3' lombokVersion = '1.18.34' mockitoVersion = '5.2.0' micrometerVersion = '1.13.6' diff --git a/client/java/transports-gcs/build.gradle b/client/java/transports-gcs/build.gradle index 6556e4a13a..5306bd0155 100644 --- a/client/java/transports-gcs/build.gradle +++ b/client/java/transports-gcs/build.gradle @@ -23,7 +23,7 @@ ext { } dependencies { - implementation('com.google.cloud:google-cloud-storage:2.43.2') + implementation('com.google.cloud:google-cloud-storage:2.44.1') testImplementation('com.google.cloud:google-cloud-nio:0.127.25') } diff --git a/client/java/transports-s3/build.gradle b/client/java/transports-s3/build.gradle index ada4642003..45a3d933d3 100644 --- a/client/java/transports-s3/build.gradle +++ b/client/java/transports-s3/build.gradle @@ -22,7 +22,7 @@ plugins { ext { projectDescription = "S3 OpenLineage transport library" s3MockVersion = "3.11.0" - testcontainersVersion = "1.20.2" + testcontainersVersion = "1.20.3" } sourceSets { @@ -37,7 +37,7 @@ sourceSets { dependencies { compileOnly("com.google.code.findbugs:jsr305:3.0.2") - implementation(platform("software.amazon.awssdk:bom:2.28.26")) + implementation(platform("software.amazon.awssdk:bom:2.29.1")) implementation("software.amazon.awssdk:auth") implementation("software.amazon.awssdk:s3") implementation("software.amazon.awssdk:url-connection-client") From 
cacf8abd62e1637d5e01e8e22da8006ae4ff9298 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 08:27:38 +0100 Subject: [PATCH 29/89] Bump the integration-spark group across 1 directory with 8 updates (#3189) Bumps the integration-spark group with 8 updates in the /integration/spark directory: | Package | From | To | | --- | --- | --- | | [io.micrometer:micrometer-core](https://github.com/micrometer-metrics/micrometer) | `1.13.5` | `1.13.6` | | [io.micrometer:micrometer-registry-statsd](https://github.com/micrometer-metrics/micrometer) | `1.13.5` | `1.13.6` | | [org.junit.jupiter:junit-jupiter-api](https://github.com/junit-team/junit5) | `5.11.2` | `5.11.3` | | [org.junit.jupiter:junit-jupiter](https://github.com/junit-team/junit5) | `5.11.2` | `5.11.3` | | [org.junit:junit-bom](https://github.com/junit-team/junit5) | `5.11.2` | `5.11.3` | | software.amazon.awssdk:bom | `2.28.11` | `2.29.1` | | [org.xerial:sqlite-jdbc](https://github.com/xerial/sqlite-jdbc) | `3.46.1.3` | `3.47.0.0` | | [org.testcontainers:testcontainers-bom](https://github.com/testcontainers/testcontainers-java) | `1.20.2` | `1.20.3` | Updates `io.micrometer:micrometer-core` from 1.13.5 to 1.13.6 - [Release notes](https://github.com/micrometer-metrics/micrometer/releases) - [Commits](https://github.com/micrometer-metrics/micrometer/compare/v1.13.5...v1.13.6) Updates `io.micrometer:micrometer-registry-statsd` from 1.13.5 to 1.13.6 - [Release notes](https://github.com/micrometer-metrics/micrometer/releases) - [Commits](https://github.com/micrometer-metrics/micrometer/compare/v1.13.5...v1.13.6) Updates `org.junit.jupiter:junit-jupiter-api` from 5.11.2 to 5.11.3 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.11.2...r5.11.3) Updates `org.junit.jupiter:junit-jupiter` from 5.11.2 to 5.11.3 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.11.2...r5.11.3) Updates `org.junit:junit-bom` from 5.11.2 to 5.11.3 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.11.2...r5.11.3) Updates `org.junit.jupiter:junit-jupiter` from 5.11.2 to 5.11.3 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.11.2...r5.11.3) Updates `software.amazon.awssdk:bom` from 2.28.11 to 2.29.1 Updates `org.junit:junit-bom` from 5.11.2 to 5.11.3 - [Release notes](https://github.com/junit-team/junit5/releases) - [Commits](https://github.com/junit-team/junit5/compare/r5.11.2...r5.11.3) Updates `org.xerial:sqlite-jdbc` from 3.46.1.3 to 3.47.0.0 - [Release notes](https://github.com/xerial/sqlite-jdbc/releases) - [Changelog](https://github.com/xerial/sqlite-jdbc/blob/master/CHANGELOG) - [Commits](https://github.com/xerial/sqlite-jdbc/compare/3.46.1.3...3.47.0.0) Updates `org.testcontainers:testcontainers-bom` from 1.20.2 to 1.20.3 - [Release notes](https://github.com/testcontainers/testcontainers-java/releases) - [Changelog](https://github.com/testcontainers/testcontainers-java/blob/main/CHANGELOG.md) - [Commits](https://github.com/testcontainers/testcontainers-java/compare/1.20.2...1.20.3) Updates `io.micrometer:micrometer-registry-statsd` from 1.13.5 to 1.13.6 - [Release notes](https://github.com/micrometer-metrics/micrometer/releases) - 
[Commits](https://github.com/micrometer-metrics/micrometer/compare/v1.13.5...v1.13.6) --- updated-dependencies: - dependency-name: io.micrometer:micrometer-core dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: io.micrometer:micrometer-registry-statsd dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.junit.jupiter:junit-jupiter-api dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.junit.jupiter:junit-jupiter dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.junit:junit-bom dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.junit.jupiter:junit-jupiter dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: software.amazon.awssdk:bom dependency-type: direct:production update-type: version-update:semver-minor dependency-group: integration-spark - dependency-name: org.junit:junit-bom dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.xerial:sqlite-jdbc dependency-type: direct:production update-type: version-update:semver-minor dependency-group: integration-spark - dependency-name: org.testcontainers:testcontainers-bom dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: io.micrometer:micrometer-registry-statsd dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- integration/spark/app/build.gradle | 10 +++++----- integration/spark/shared/build.gradle | 8 ++++---- integration/spark/spark2/build.gradle | 4 ++-- integration/spark/spark3/build.gradle | 4 ++-- integration/spark/spark31/build.gradle | 4 ++-- integration/spark/spark32/build.gradle | 4 ++-- integration/spark/spark33/build.gradle | 4 ++-- integration/spark/spark34/build.gradle | 4 ++-- integration/spark/spark35/build.gradle | 4 ++-- integration/spark/spark40/build.gradle | 4 ++-- integration/spark/vendor/snowflake/build.gradle | 4 ++-- 11 files changed, 27 insertions(+), 27 deletions(-) diff --git a/integration/spark/app/build.gradle b/integration/spark/app/build.gradle index d0233cb997..acb832740e 100644 --- a/integration/spark/app/build.gradle +++ b/integration/spark/app/build.gradle @@ -25,16 +25,16 @@ ext { assertjVersion = '3.26.3' bigqueryVersion = '0.35.1' - junit5Version = '5.11.2' + junit5Version = '5.11.3' mockitoVersion = '4.11.0' postgresqlVersion = '42.7.4' - testcontainersVersion = '1.20.2' + testcontainersVersion = '1.20.3' configurableTestConfig = [ sparkConfFile: project.findProperty('spark.conf.file') ?: System.getProperty('spark.conf.file'), hostDir : project.findProperty('host.dir') ?: System.getProperty('host.dir'), testDir : project.findProperty('test.dir') ?: System.getProperty('test.dir') ] - micrometerVersion = '1.13.5' + micrometerVersion = '1.13.6' } // This workaround is needed because the version of Snappy that Spark 2.4.x runs with, @@ -107,7 +107,7 @@ dependencies { testImplementation("org.junit.jupiter:junit-jupiter-params") testImplementation("org.postgresql:postgresql:${postgresqlVersion}") testImplementation('org.hamcrest:hamcrest-library:3.0') - testImplementation('org.xerial:sqlite-jdbc:3.46.1.3') + testImplementation('org.xerial:sqlite-jdbc:3.47.0.0') testImplementation(platform("org.testcontainers:testcontainers-bom:${testcontainersVersion}")) testImplementation("org.testcontainers:junit-jupiter") testImplementation("org.testcontainers:postgresql") @@ -124,7 +124,7 @@ dependencies { exclude group: 'com.fasterxml.jackson.module' } - testImplementation(platform("software.amazon.awssdk:bom:2.28.21")) + testImplementation(platform("software.amazon.awssdk:bom:2.29.1")) testImplementation("software.amazon.awssdk:auth") testImplementation("software.amazon.awssdk:emr") testImplementation("software.amazon.awssdk:s3") diff --git a/integration/spark/shared/build.gradle b/integration/spark/shared/build.gradle index a760885c4c..3d3a1094de 100644 --- a/integration/spark/shared/build.gradle +++ b/integration/spark/shared/build.gradle @@ -29,15 +29,15 @@ ext { awaitilityVersion = "4.2.2" bigqueryVersion = "0.41.0" databricksVersion = "0.1.4" - junit5Version = "5.11.2" + junit5Version = "5.11.3" kafkaClientsVersion = "3.8.0" - micrometerVersion = '1.13.5' + micrometerVersion = '1.13.6' mockitoVersion = "4.11.0" mockserverVersion = "5.14.0" postgresqlVersion = "42.7.4" - sqlLiteVersion = "3.46.1.3" + sqlLiteVersion = "3.47.0.0" testcontainersVersion = "1.19.3" - awsSdkVersion = '2.28.11' + awsSdkVersion = '2.29.1' sparkVersion = project.findProperty("shared.spark.version") sparkSeries = sparkVersion.substring(0, 3) diff --git a/integration/spark/spark2/build.gradle b/integration/spark/spark2/build.gradle index f76de18900..d3ba6d25dd 100644 --- a/integration/spark/spark2/build.gradle +++ b/integration/spark/spark2/build.gradle @@ -25,10 +25,10 @@ ext { 
deltaVersion = "1.1.0" icebergVersion = "0.14.1" jacksonVersion = "2.15.3" - junit5Version = "5.11.2" + junit5Version = "5.11.3" mockitoVersion = "4.11.0" postgresqlVersion = "42.7.4" - micrometerVersion = '1.13.5' + micrometerVersion = '1.13.6' sparkVersion = project.findProperty("spark2.spark.version") scalaBinaryVersion = "2.11" diff --git a/integration/spark/spark3/build.gradle b/integration/spark/spark3/build.gradle index d5b0052849..8f8f9fd66c 100644 --- a/integration/spark/spark3/build.gradle +++ b/integration/spark/spark3/build.gradle @@ -27,10 +27,10 @@ ext { deltaVersion = "1.1.0" icebergVersion = "1.4.3" jacksonVersion = "2.15.3" - junit5Version = "5.11.2" + junit5Version = "5.11.3" lombokVersion = "1.18.30" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.5" + micrometerVersion = "1.13.6" sparkVersion = project.findProperty("spark3.spark.version") scalaBinaryVersion = project.findProperty("scala.binary.version") diff --git a/integration/spark/spark31/build.gradle b/integration/spark/spark31/build.gradle index 7ca412f9f8..2426960601 100644 --- a/integration/spark/spark31/build.gradle +++ b/integration/spark/spark31/build.gradle @@ -16,9 +16,9 @@ scalaVariants { ext { assertjVersion = "3.26.3" - junit5Version = "5.11.2" + junit5Version = "5.11.3" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.5" + micrometerVersion = "1.13.6" sparkVersion = project.findProperty("spark31.spark.version") } diff --git a/integration/spark/spark32/build.gradle b/integration/spark/spark32/build.gradle index d697dc74b4..594e3bc7a5 100644 --- a/integration/spark/spark32/build.gradle +++ b/integration/spark/spark32/build.gradle @@ -25,9 +25,9 @@ ext { deltaVersion = "1.1.0" icebergVersion = "0.14.1" jacksonVersion = "2.15.3" - junit5Version = "5.11.2" + junit5Version = "5.11.3" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.5" + micrometerVersion = "1.13.6" sparkVersion = project.findProperty("spark32.spark.version") scalaBinaryVersion = project.findProperty("scala.binary.version") diff --git a/integration/spark/spark33/build.gradle b/integration/spark/spark33/build.gradle index be4367382c..6197993bed 100644 --- a/integration/spark/spark33/build.gradle +++ b/integration/spark/spark33/build.gradle @@ -24,9 +24,9 @@ ext { assertjVersion = "3.26.3" icebergVersion = "0.14.1" jacksonVersion = "2.15.3" - junit5Version = "5.11.2" + junit5Version = "5.11.3" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.5" + micrometerVersion = "1.13.6" sparkVersion = project.findProperty("spark33.spark.version") scalaBinaryVersion = project.findProperty("scala.binary.version") diff --git a/integration/spark/spark34/build.gradle b/integration/spark/spark34/build.gradle index e3c317c44f..dcd06eebac 100644 --- a/integration/spark/spark34/build.gradle +++ b/integration/spark/spark34/build.gradle @@ -25,9 +25,9 @@ ext { deltaVersion = "2.4.0" icebergVersion = "1.3.1" jacksonVersion = "2.15.3" - junit5Version = "5.11.2" + junit5Version = "5.11.3" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.5" + micrometerVersion = "1.13.6" sparkVersion = project.findProperty("spark34.spark.version") scalaBinaryVersion = project.findProperty("scala.binary.version") diff --git a/integration/spark/spark35/build.gradle b/integration/spark/spark35/build.gradle index 36314f18ba..15d5223e85 100644 --- a/integration/spark/spark35/build.gradle +++ b/integration/spark/spark35/build.gradle @@ -23,9 +23,9 @@ idea { ext { assertjVersion = "3.26.3" commonsLangVersion = "3.12.0" - junit5Version = "5.11.2" + junit5Version = 
"5.11.3" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.5" + micrometerVersion = "1.13.6" sparkVersion = project.findProperty("spark35.spark.version") scalaBinaryVersion = project.findProperty("scala.binary.version") diff --git a/integration/spark/spark40/build.gradle b/integration/spark/spark40/build.gradle index 53e7af1067..855d9f24d0 100644 --- a/integration/spark/spark40/build.gradle +++ b/integration/spark/spark40/build.gradle @@ -28,9 +28,9 @@ idea { ext { assertjVersion = "3.26.3" commonsLangVersion = "3.12.0" - junit5Version = "5.11.2" + junit5Version = "5.11.3" mockitoVersion = "4.11.0" - micrometerVersion = "1.13.5" + micrometerVersion = "1.13.6" sparkVersion = project.findProperty("spark40.spark.version") + "-preview1" scalaBinaryVersion = "2.13" diff --git a/integration/spark/vendor/snowflake/build.gradle b/integration/spark/vendor/snowflake/build.gradle index fad94f4849..b56371f72f 100644 --- a/integration/spark/vendor/snowflake/build.gradle +++ b/integration/spark/vendor/snowflake/build.gradle @@ -7,11 +7,11 @@ plugins { ext { assertjVersion = '3.26.3' - junit5Version = '5.11.2' + junit5Version = '5.11.3' snowflakeVersion = '2.13.0' mockitoVersion = '4.11.0' lombokVersion = '1.18.30' - micrometerVersion = '1.13.5' + micrometerVersion = '1.13.6' snowflakeLookup = [ '2.4.8': '2.9.3', From f651a32cb5e0ebe13e2e2a678b8eac462dfd39b9 Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Mon, 28 Oct 2024 11:19:30 +0100 Subject: [PATCH 30/89] fix: Fix missing WEB_PORT variable where marquez-web container is used. (#3192) Signed-off-by: Artur Owczarek --- .circleci/config.yml | 3 ++- integration/spark/docker-compose.yml | 1 + proxy/backend/docker-compose.yml | 1 + proxy/fluentd/docker/docker-compose.yml | 1 + website/blog/openlineage-spark/index.mdx | 2 +- website/docs/guides/airflow-quickstart.md | 1 + website/docs/guides/spark.md | 2 +- 7 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index dcb4ff3089..0a59f3b5b5 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -151,12 +151,13 @@ jobs: - run: name: Remove approval steps if not pull from forks. 
command: | - pip install pyyaml==6.0.1 + pip install pyyaml==6.0.1 python dev/filter_approvals.py - run: | export IS_FULL_TESTS=$(gh pr view --json labels | jq 'any(.currentBranch.labels[]; .name == "full-tests")') echo $IS_FULL_TESTS if [ -z "$IS_FULL_TESTS" ] || [ "$IS_FULL_TESTS" == "0" ]; then + pip install pyyaml==6.0.1 python dev/filter_matrix.py fi - when: diff --git a/integration/spark/docker-compose.yml b/integration/spark/docker-compose.yml index 27a4852a3b..e4b7d358a3 100644 --- a/integration/spark/docker-compose.yml +++ b/integration/spark/docker-compose.yml @@ -35,6 +35,7 @@ services: environment: - MARQUEZ_HOST=marquez-api - MARQUEZ_PORT=5000 + - WEB_PORT=3000 ports: - "3000:3000" stdin_open: true diff --git a/proxy/backend/docker-compose.yml b/proxy/backend/docker-compose.yml index 29e36134fa..4cfe92e2ae 100644 --- a/proxy/backend/docker-compose.yml +++ b/proxy/backend/docker-compose.yml @@ -36,6 +36,7 @@ services: environment: - MARQUEZ_HOST=marquez-api - MARQUEZ_PORT=5000 + - WEB_PORT=3000 ports: - "3000:3000" stdin_open: true diff --git a/proxy/fluentd/docker/docker-compose.yml b/proxy/fluentd/docker/docker-compose.yml index 86e1fd3503..7055685a14 100644 --- a/proxy/fluentd/docker/docker-compose.yml +++ b/proxy/fluentd/docker/docker-compose.yml @@ -38,6 +38,7 @@ services: environment: - MARQUEZ_HOST=marquez-api - MARQUEZ_PORT=5000 + - WEB_PORT=3000 ports: - "3000:3000" stdin_open: true diff --git a/website/blog/openlineage-spark/index.mdx b/website/blog/openlineage-spark/index.mdx index 6c21b82b43..87c658d677 100644 --- a/website/blog/openlineage-spark/index.mdx +++ b/website/blog/openlineage-spark/index.mdx @@ -211,7 +211,7 @@ For exploring visually, we’ll also want to start up the Marquez web project. W containers, run the following command in a new terminal: ```bash -docker run --network spark_default -p 3000:3000 -e MARQUEZ_HOST=marquez-api -e MARQUEZ_PORT=5000 --link marquez-api:marquez-api marquezproject/marquez-web:0.19.1 +docker run --network spark_default -p 3000:3000 -e MARQUEZ_HOST=marquez-api -e MARQUEZ_PORT=5000 -e WEB_PORT=3000 --link marquez-api:marquez-api marquezproject/marquez-web:0.19.1 ``` Now open a new browser tab and navigate to `http://localhost:3000`. You should see a screen like the following: diff --git a/website/docs/guides/airflow-quickstart.md b/website/docs/guides/airflow-quickstart.md index 062508d6c1..a7e0c9dcd5 100644 --- a/website/docs/guides/airflow-quickstart.md +++ b/website/docs/guides/airflow-quickstart.md @@ -110,6 +110,7 @@ services: environment: - MARQUEZ_HOST=api - MARQUEZ_PORT=5000 + - WEB_PORT=3000 ports: - "3000:3000" depends_on: diff --git a/website/docs/guides/spark.md b/website/docs/guides/spark.md index 232c5d0499..65b4375567 100644 --- a/website/docs/guides/spark.md +++ b/website/docs/guides/spark.md @@ -151,7 +151,7 @@ Now that the pipeline is operational it is available for lineage collection. The `docker-compose.yml` file that ships with the OpenLineage repo includes only the Jupyter notebook and the Marquez API. To explore the lineage visually, start up the Marquez web project. 
Without terminating the existing docker containers, run the following command in a new terminal: ``` -docker run --network spark_default -p 3000:3000 -e MARQUEZ_HOST=marquez-api -e MARQUEZ_PORT=5000 --link marquez-api:marquez-api marquezproject/marquez-web:0.19.1 +docker run --network spark_default -p 3000:3000 -e MARQUEZ_HOST=marquez-api -e MARQUEZ_PORT=5000 -e WEB_PORT=3000 --link marquez-api:marquez-api marquezproject/marquez-web:0.19.1 ``` Next, open a new browser tab and navigate to http://localhost:3000, which should look like this: From 64b76e6f3c2a7c5ab82b08302af927b807e6cf2c Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Mon, 28 Oct 2024 15:24:18 +0100 Subject: [PATCH 31/89] fix: databricks integration tests on CircleCI regression is now fixed (#3193) Signed-off-by: Artur Owczarek --- .circleci/continue_config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/continue_config.yml b/.circleci/continue_config.yml index 61aea0af1a..f8d1bfc726 100644 --- a/.circleci/continue_config.yml +++ b/.circleci/continue_config.yml @@ -865,7 +865,7 @@ jobs: - set_java_spark_scala_version: env-variant: << parameters.env-variant >> - run: ./gradlew --console=plain shadowJar -x test -Pjava.compile.home=${JAVA17_HOME} - - run: ./gradlew --no-daemon --console=plain databricksIntegrationTest -x test -Pspark.version=${SPARK} -Pscala.binary.version=${SCALA} -Pjava.compile.home=${JAVA17_HOME} -PdatabricksHost=${DATABRICKS_HOST} -PdatabricksToken=${DATABRICKS_TOKEN} + - run: ./gradlew --no-daemon --console=plain databricksIntegrationTest -x test -Pspark.version=${SPARK} -Pscala.binary.version=${SCALA} -Pjava.compile.home=${JAVA17_HOME} -Dopenlineage.tests.databricks.host=$DATABRICKS_HOST -Dopenlineage.tests.databricks.token=$DATABRICKS_TOKEN - store_test_results: path: app/build/test-results/databricksIntegrationTest - store_artifacts: From ddb24dc8953dcb8ea7e1424182e8d85de4d541ff Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Tue, 29 Oct 2024 10:59:40 +0100 Subject: [PATCH 32/89] fix: fixes regression with bad Spark version in Databricks integration tests (#3196) Signed-off-by: Artur Owczarek --- .circleci/workflows/openlineage-spark.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/workflows/openlineage-spark.yml b/.circleci/workflows/openlineage-spark.yml index 03feeea280..2745333b50 100644 --- a/.circleci/workflows/openlineage-spark.yml +++ b/.circleci/workflows/openlineage-spark.yml @@ -93,7 +93,7 @@ workflows: matrix: parameters: env-variant: [ - 'java:8-spark:3.4.3-scala:2.12-full-tests', + 'java:8-spark:3.4.2-scala:2.12-full-tests', 'java:17-spark:3.5.2-scala:2.13-full-tests' ] requires: From efcbf1d7c58b9e7305c12d270db7b7cd84e98305 Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Wed, 30 Oct 2024 08:17:33 +0100 Subject: [PATCH 33/89] fix: Fixes names normalization. (#3053) * The job normalization now includes more cases where parenthesis and acronyms are better handled. 
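For illustration only (not part of this patch): a minimal sketch of the expected behavior of the new NameNormalizer. The input application name "OpenLineageIntegrationHadoopFSRelation" is an assumption inferred from the fixture names, and the expected output is taken from the updated integration-test JSON fixtures below; neither is verified against the implementation.

    import io.openlineage.spark.api.naming.NameNormalizer;

    public class NameNormalizerDemo {
      public static void main(String[] args) {
        // Acronyms are now kept together: before this change "HadoopFSRelation" normalized to
        // "hadoop_f_s_relation"; with the fix it is expected to become "hadoop_fs_relation".
        System.out.println(NameNormalizer.normalize("OpenLineageIntegrationHadoopFSRelation"));
        // expected (per the updated fixtures): open_lineage_integration_hadoop_fs_relation
      }
    }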
Signed-off-by: Artur Owczarek --- .../container/pysparkHadoopFSEndEvent.json | 2 +- .../container/pysparkHadoopFSStartEvent.json | 2 +- .../container/pysparkLoadComplete.json | 2 +- .../container/pysparkLoadStart.json | 2 +- .../pysparkRDDWithParquetComplete.json | 2 +- .../spark/agent/util/GCPUtils.java | 10 +--- .../spark/agent/util/PlanUtils.java | 5 +- .../naming/ApplicationJobNameResolver.java | 5 +- .../spark/api/naming/JobNameBuilder.java | 18 ++---- .../spark/api/naming/NameNormalizer.java | 42 ++++++++++++++ .../agent/lifecycle/ExecutionContextTest.java | 28 ---------- .../spark/api/naming/JobNameBuilderTest.java | 12 ++-- .../spark/api/naming/NameNormalizerTest.java | 55 +++++++++++++++++++ 13 files changed, 117 insertions(+), 68 deletions(-) create mode 100644 integration/spark/shared/src/main/java/io/openlineage/spark/api/naming/NameNormalizer.java delete mode 100644 integration/spark/shared/src/test/java/io/openlineage/spark/agent/lifecycle/ExecutionContextTest.java create mode 100644 integration/spark/shared/src/test/java/io/openlineage/spark/api/naming/NameNormalizerTest.java diff --git a/integration/spark/app/integrations/container/pysparkHadoopFSEndEvent.json b/integration/spark/app/integrations/container/pysparkHadoopFSEndEvent.json index 305a1566e1..8534dfe94c 100644 --- a/integration/spark/app/integrations/container/pysparkHadoopFSEndEvent.json +++ b/integration/spark/app/integrations/container/pysparkHadoopFSEndEvent.json @@ -2,7 +2,7 @@ "eventType": "COMPLETE", "job": { "namespace": "testPysparkSQLHadoopFSTest", - "name": "open_lineage_integration_hadoop_f_s_relation.execute_insert_into_hadoop_fs_relation_command.warehouse_target" + "name": "open_lineage_integration_hadoop_fs_relation.execute_insert_into_hadoop_fs_relation_command.warehouse_target" }, "inputs": [ { diff --git a/integration/spark/app/integrations/container/pysparkHadoopFSStartEvent.json b/integration/spark/app/integrations/container/pysparkHadoopFSStartEvent.json index 75cb922f65..66ddea8e0f 100644 --- a/integration/spark/app/integrations/container/pysparkHadoopFSStartEvent.json +++ b/integration/spark/app/integrations/container/pysparkHadoopFSStartEvent.json @@ -2,7 +2,7 @@ "eventType": "START", "job": { "namespace": "testPysparkSQLHadoopFSTest", - "name": "open_lineage_integration_hadoop_f_s_relation.execute_insert_into_hadoop_fs_relation_command.warehouse_target" + "name": "open_lineage_integration_hadoop_fs_relation.execute_insert_into_hadoop_fs_relation_command.warehouse_target" }, "inputs": [ { diff --git a/integration/spark/app/integrations/container/pysparkLoadComplete.json b/integration/spark/app/integrations/container/pysparkLoadComplete.json index a9ce876151..67304e358c 100644 --- a/integration/spark/app/integrations/container/pysparkLoadComplete.json +++ b/integration/spark/app/integrations/container/pysparkLoadComplete.json @@ -2,7 +2,7 @@ "eventType" : "COMPLETE", "job" : { "namespace" : "testCreateAsSelectAndLoad", - "name" : "open_lineage_integration_cta_s_load.execute_load_data_command.default_tbl1" + "name" : "open_lineage_integration_ctas_load.execute_load_data_command.default_tbl1" }, "inputs" : [ ], "outputs" : [ { diff --git a/integration/spark/app/integrations/container/pysparkLoadStart.json b/integration/spark/app/integrations/container/pysparkLoadStart.json index 3660bc8685..5d09ba2a48 100644 --- a/integration/spark/app/integrations/container/pysparkLoadStart.json +++ b/integration/spark/app/integrations/container/pysparkLoadStart.json @@ -2,7 +2,7 @@ "eventType" : "START", 
"job" : { "namespace" : "testCreateAsSelectAndLoad", - "name" : "open_lineage_integration_cta_s_load.execute_load_data_command.default_tbl1" + "name" : "open_lineage_integration_ctas_load.execute_load_data_command.default_tbl1" }, "inputs" : [ ], "outputs" : [ { diff --git a/integration/spark/app/integrations/container/pysparkRDDWithParquetComplete.json b/integration/spark/app/integrations/container/pysparkRDDWithParquetComplete.json index 9ce508f204..13e6800a3e 100644 --- a/integration/spark/app/integrations/container/pysparkRDDWithParquetComplete.json +++ b/integration/spark/app/integrations/container/pysparkRDDWithParquetComplete.json @@ -2,7 +2,7 @@ "eventType": "COMPLETE", "job": { "namespace": "testRddWithParquet", - "name": "open_lineage_integration_rd_d_with_parquet.execute_insert_into_hadoop_fs_relation_command.tmp_rdd_c" + "name": "open_lineage_integration_rdd_with_parquet.execute_insert_into_hadoop_fs_relation_command.tmp_rdd_c" }, "inputs": [ { diff --git a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/GCPUtils.java b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/GCPUtils.java index a114af9192..ce5996db16 100644 --- a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/GCPUtils.java +++ b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/GCPUtils.java @@ -10,11 +10,11 @@ import com.google.common.io.CharStreams; import io.openlineage.client.Environment; import io.openlineage.spark.api.OpenLineageContext; +import io.openlineage.spark.api.naming.NameNormalizer; import java.io.IOException; import java.io.InputStreamReader; import java.util.Arrays; import java.util.HashMap; -import java.util.Locale; import java.util.Map; import java.util.Optional; import org.apache.http.Consts; @@ -114,7 +114,7 @@ public static Optional getSparkQueryExecutionNodeName(OpenLineageContext SparkPlan node = context.getQueryExecution().get().executedPlan(); if (node instanceof WholeStageCodegenExec) node = ((WholeStageCodegenExec) node).child(); - return Optional.of(normalizeName(node.nodeName())); + return Optional.of(NameNormalizer.normalize(node.nodeName())); } private static ResourceType identifyResource(SparkContext context) { @@ -211,12 +211,6 @@ private static Map createDataprocOriginMap(SparkContext context) return originProperties; } - private static String normalizeName(String name) { - String CAMEL_TO_SNAKE_CASE = - "[\\s\\-_]?((?<=.)[A-Z](?=[a-z\\s\\-_])|(?<=[^A-Z])[A-Z]|((?<=[\\s\\-_])[a-z\\d]))"; - return name.replaceAll(CAMEL_TO_SNAKE_CASE, "_$1").toLowerCase(Locale.ROOT); - } - private static Optional getPropertyFromYarnTag(SparkContext context, String tagPrefix) { String yarnTag = context.getConf().get(SPARK_YARN_TAGS, null); if (yarnTag == null) { diff --git a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/PlanUtils.java b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/PlanUtils.java index 05f7a48e36..adb222d726 100644 --- a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/PlanUtils.java +++ b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/PlanUtils.java @@ -5,18 +5,17 @@ package io.openlineage.spark.agent.util; -import static io.openlineage.spark.agent.lifecycle.ExecutionContext.CAMEL_TO_SNAKE_CASE; import static io.openlineage.spark.agent.util.ScalaConversionUtils.asJavaOptional; import io.openlineage.client.OpenLineage; import io.openlineage.spark.agent.Versions; +import 
io.openlineage.spark.api.naming.NameNormalizer; import java.io.IOException; import java.net.URI; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Locale; import java.util.Objects; import java.util.Optional; import java.util.UUID; @@ -231,7 +230,7 @@ public static OpenLineage.ParentRunFacet parentRunFacet( .run(new OpenLineage.ParentRunFacetRunBuilder().runId(parentRunId).build()) .job( new OpenLineage.ParentRunFacetJobBuilder() - .name(parentJob.replaceAll(CAMEL_TO_SNAKE_CASE, "_$1").toLowerCase(Locale.ROOT)) + .name(NameNormalizer.normalize(parentJob)) .namespace(parentJobNamespace) .build()) .build(); diff --git a/integration/spark/shared/src/main/java/io/openlineage/spark/api/naming/ApplicationJobNameResolver.java b/integration/spark/shared/src/main/java/io/openlineage/spark/api/naming/ApplicationJobNameResolver.java index 43fd727cac..3ea7d6dac9 100644 --- a/integration/spark/shared/src/main/java/io/openlineage/spark/api/naming/ApplicationJobNameResolver.java +++ b/integration/spark/shared/src/main/java/io/openlineage/spark/api/naming/ApplicationJobNameResolver.java @@ -4,12 +4,9 @@ */ package io.openlineage.spark.api.naming; -import static io.openlineage.spark.agent.lifecycle.ExecutionContext.CAMEL_TO_SNAKE_CASE; - import com.google.common.collect.ImmutableList; import io.openlineage.spark.api.OpenLineageContext; import java.util.List; -import java.util.Locale; import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -59,7 +56,7 @@ public static List buildProvidersList() { * non-alphanumeric characters with underscores ('_'). */ private static String normalizeName(String name) { - String normalizedName = name.replaceAll(CAMEL_TO_SNAKE_CASE, "_$1").toLowerCase(Locale.ROOT); + String normalizedName = NameNormalizer.normalize(name); log.debug("The application name [{}] has been normalized to [{}]", name, normalizedName); return normalizedName; } diff --git a/integration/spark/shared/src/main/java/io/openlineage/spark/api/naming/JobNameBuilder.java b/integration/spark/shared/src/main/java/io/openlineage/spark/api/naming/JobNameBuilder.java index 67274ca2dc..e8675ffb6e 100644 --- a/integration/spark/shared/src/main/java/io/openlineage/spark/api/naming/JobNameBuilder.java +++ b/integration/spark/shared/src/main/java/io/openlineage/spark/api/naming/JobNameBuilder.java @@ -5,7 +5,6 @@ package io.openlineage.spark.api.naming; -import static io.openlineage.spark.agent.lifecycle.ExecutionContext.CAMEL_TO_SNAKE_CASE; import static io.openlineage.spark.agent.util.DatabricksUtils.prettifyDatabricksJobName; import io.openlineage.spark.agent.util.DatabricksUtils; @@ -14,7 +13,6 @@ import io.openlineage.spark.api.SparkOpenLineageConfig; import io.openlineage.spark.api.SparkOpenLineageConfig.JobNameConfig; import java.util.List; -import java.util.Locale; import java.util.Optional; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; @@ -44,13 +42,13 @@ public static String build(OpenLineageContext context) { nodeName -> jobNameBuilder .append(JOB_NAME_PARTS_SEPARATOR) - .append(replaceDots(context, normalizeName(nodeName)))); + .append(replaceDots(context, NameNormalizer.normalize(nodeName)))); String jobName; if (context.getOpenLineageConfig().getJobName() != null && !context.getOpenLineageConfig().getJobName().getAppendDatasetName()) { // no need to append output dataset name - jobName = normalizeName(jobNameBuilder.toString()); + jobName = jobNameBuilder.toString(); } else { // append output 
dataset as job suffix jobNameBuilder.append( @@ -72,8 +70,9 @@ public static String build(OpenLineageContext context) { } public static String build(OpenLineageContext context, String rddSuffix) { - return normalizeName( - applicationJobNameResolver.getJobName(context) + JOB_NAME_PARTS_SEPARATOR + rddSuffix); + return applicationJobNameResolver.getJobName(context) + + JOB_NAME_PARTS_SEPARATOR + + NameNormalizer.normalize(rddSuffix); } private static String replaceDots(OpenLineageContext context, String jobName) { @@ -120,11 +119,6 @@ private static Optional sparkNodeName(OpenLineageContext context) { node = ((WholeStageCodegenExec) node).child(); } - return Optional.ofNullable(node).map(SparkPlan::nodeName).map(JobNameBuilder::normalizeName); - } - - // normalizes string, changes CamelCase to snake_case and replaces all non-alphanumerics with '_' - private static String normalizeName(String name) { - return name.replaceAll(CAMEL_TO_SNAKE_CASE, "_$1").toLowerCase(Locale.ROOT); + return Optional.ofNullable(node).map(SparkPlan::nodeName).map(NameNormalizer::normalize); } } diff --git a/integration/spark/shared/src/main/java/io/openlineage/spark/api/naming/NameNormalizer.java b/integration/spark/shared/src/main/java/io/openlineage/spark/api/naming/NameNormalizer.java new file mode 100644 index 0000000000..3d0351a6bb --- /dev/null +++ b/integration/spark/shared/src/main/java/io/openlineage/spark/api/naming/NameNormalizer.java @@ -0,0 +1,42 @@ +/* +/* Copyright 2018-2024 contributors to the OpenLineage project +/* SPDX-License-Identifier: Apache-2.0 +*/ +package io.openlineage.spark.api.naming; + +import java.text.Normalizer; +import java.util.Locale; +import lombok.experimental.UtilityClass; + +@UtilityClass +public class NameNormalizer { + /** Normalizes the input stream into the version_with_underscores_only. */ + public static String normalize(String input) { + /* + First, trim non-letters on both ends. + */ + String trimmed = input.replaceAll("^[^\\w]+", "").replaceAll("[^\\w]+$", ""); + /* + At this point we break every letter with an accent (āăąēîïĩíĝġńñšŝśûůŷ) into two characters + (a letter followed by its accent). Then we remove the accents. + */ + String normalizedAccents = + Normalizer.normalize(trimmed, Normalizer.Form.NFKD).replaceAll("\\p{M}", ""); + /* + Now we can detect separate words. + First we add separation for cases like the following: "camelCaseExample" -> "camel Case Example" + Then we handle acronyms: "someACRONYMInside" -> "some ACRONYM Inside" + We add some extra check that we do not make the second part at the beginning of the input. Otherwise, we would add + unnecessary space at the beginning in some cases. + */ + String withSeparatedWords = + normalizedAccents + .replaceAll("([a-z])([A-Z])", "$1 $2") + .replaceAll("(? 
Date: Wed, 30 Oct 2024 09:01:24 +0100 Subject: [PATCH 34/89] feature: improve Databricks integration tests (#3195) * Extract DatabricksEnvironment to manage Databricks resources * Add possibility to fetch log4j, stdout and stderr logs Signed-off-by: Artur Owczarek --- .circleci/continue_config.yml | 2 +- .../agent/DatabricksDynamicParameter.java | 42 ++- ...sUtils.java => DatabricksEnvironment.java} | 311 ++++++++++++------ .../agent/DatabricksIntegrationTest.java | 109 +++--- 4 files changed, 291 insertions(+), 173 deletions(-) rename integration/spark/app/src/test/java/io/openlineage/spark/agent/{DatabricksUtils.java => DatabricksEnvironment.java} (53%) diff --git a/.circleci/continue_config.yml b/.circleci/continue_config.yml index f8d1bfc726..a3ab9fc5be 100644 --- a/.circleci/continue_config.yml +++ b/.circleci/continue_config.yml @@ -865,7 +865,7 @@ jobs: - set_java_spark_scala_version: env-variant: << parameters.env-variant >> - run: ./gradlew --console=plain shadowJar -x test -Pjava.compile.home=${JAVA17_HOME} - - run: ./gradlew --no-daemon --console=plain databricksIntegrationTest -x test -Pspark.version=${SPARK} -Pscala.binary.version=${SCALA} -Pjava.compile.home=${JAVA17_HOME} -Dopenlineage.tests.databricks.host=$DATABRICKS_HOST -Dopenlineage.tests.databricks.token=$DATABRICKS_TOKEN + - run: ./gradlew --no-daemon --console=plain databricksIntegrationTest -x test -Pspark.version=${SPARK} -Pscala.binary.version=${SCALA} -Pjava.compile.home=${JAVA17_HOME} -Dopenlineage.tests.databricks.workspace.host=$DATABRICKS_HOST -Dopenlineage.tests.databricks.workspace.token=$DATABRICKS_TOKEN - store_test_results: path: app/build/test-results/databricksIntegrationTest - store_artifacts: diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksDynamicParameter.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksDynamicParameter.java index 0db8b32f41..7720bff5e7 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksDynamicParameter.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksDynamicParameter.java @@ -19,30 +19,50 @@ @Getter public enum DatabricksDynamicParameter implements DynamicParameter { + // WORKSPACE PARAMETERS + + Host("workspace.host"), + + Token("workspace.token"), + + // CLUSTER PARAMETERS + + /** The Spark version as provided by Gradle. This case is not using the openlineage prefix. */ + SparkVersion("spark.version", null, "3.5.2"), + // DEVELOPMENT PARAMETERS /** * The ID of the cluster to use. If specified, the tests will use this existing cluster instead of * creating a new one. */ - ClusterId("clusterId", ""), + ClusterId("development.clusterId", ""), /** * When set to {@code true}, prevents the EMR cluster from terminating after tests complete. This * allows for manual inspection and debugging of the cluster state. */ - PreventClusterTermination("preventClusterTermination", "false"), - - // WORKSPACE PARAMETERS - - Host("host"), - - Token("token"), + PreventClusterTermination("development.preventClusterTermination", "false"), - // CLUSTER PARAMETERS + /** + * The location where the events should be stored for troubleshooting purposes. Each test has its + * own file with execution timestamp as the prefix and the name of the script being executed. + */ + EventsFileLocation("development.eventsFileLocation", "./build"), + FetchEvents("development.fetchEvents", "./build"), - /** The Spark version as provided by Gradle. 
This case is not using the openlineage prefix. */ - SparkVersion("spark.version", null, null); + /** + * When set to {@code true}, the given logs are fetched and stored under specified location. They + * have the execution timestamp prefix added to the name of the file. It can take up to several + * minutes for the logs to be available on DBFS, so you may consider keeping this function off if + * you don't need them. + */ + FetchLog4jLogs("development.logs.log4j.enabled", "false"), + FetchStdout("development.logs.stdout.enabled", "false"), + FetchStderr("development.logs.stderr.enabled", "false"), + Log4jLogsLocation("development.logs.log4j.location", "./build"), + StdoutLocation("development.logs.stdout.location", "./build"), + StderrLocation("development.logs.stderr.location", "./build"); private final String parameterName; private final String defaultValue; diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksUtils.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksEnvironment.java similarity index 53% rename from integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksUtils.java rename to integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksEnvironment.java index b5c13ca78a..460fa9945b 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksUtils.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksEnvironment.java @@ -9,10 +9,13 @@ import static org.awaitility.Awaitility.await; import com.databricks.sdk.WorkspaceClient; +import com.databricks.sdk.core.DatabricksConfig; +import com.databricks.sdk.mixin.DbfsExt; import com.databricks.sdk.service.compute.ClusterDetails; import com.databricks.sdk.service.compute.ClusterLogConf; import com.databricks.sdk.service.compute.CreateCluster; import com.databricks.sdk.service.compute.CreateClusterResponse; +import com.databricks.sdk.service.compute.DataSecurityMode; import com.databricks.sdk.service.compute.DbfsStorageInfo; import com.databricks.sdk.service.compute.InitScriptInfo; import com.databricks.sdk.service.compute.ListClustersRequest; @@ -28,6 +31,7 @@ import com.databricks.sdk.service.workspace.ImportFormat; import com.databricks.sdk.support.Wait; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; import com.google.common.io.Resources; import io.openlineage.client.OpenLineage.RunEvent; import io.openlineage.client.OpenLineageClientUtils; @@ -40,52 +44,95 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.time.Duration; -import java.util.AbstractMap; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.Base64; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import java.util.stream.Stream; import java.util.stream.StreamSupport; +import lombok.Builder; +import lombok.Getter; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; @Slf4j -public class DatabricksUtils { +public class DatabricksEnvironment implements AutoCloseable { public static final String CLUSTER_NAME = "openlineage-test-cluster"; public static final Map PLATFORM_VERSIONS_NAMES = - Stream.of( - new AbstractMap.SimpleEntry<>("3.4.2", "13.3.x-scala2.12"), - new AbstractMap.SimpleEntry<>("3.5.2", "14.2.x-scala2.12")) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + 
ImmutableMap.of("3.4.2", "13.3.x-scala2.12", "3.5.2", "14.2.x-scala2.12"); public static final Map PLATFORM_VERSIONS = - Stream.of( - new AbstractMap.SimpleEntry<>("3.4.2", "13.3"), - new AbstractMap.SimpleEntry<>("3.5.2", "14.2")) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); + ImmutableMap.of("3.4.2", "13.3", "3.5.2", "14.2"); public static final String NODE_TYPE = "Standard_DS3_v2"; public static final String INIT_SCRIPT_FILE = "/Shared/open-lineage-init-script.sh"; public static final String DBFS_CLUSTER_LOGS = "dbfs:/databricks/openlineage/cluster-logs"; - public static final String DBFS_EVENTS_FILE = - "dbfs:/databricks/openlineage/events_" + platformVersion() + ".log"; + private static final String executionTimestamp = + ZonedDateTime.now(ZoneOffset.UTC).format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss")); + private final DatabricksEnvironmentProperties properties; + private final WorkspaceClient workspace; + private final DbfsExt dbfs; + private final String clusterId; + @Getter private final String platformVersion; + private final String dbfsEventsFile; + private final Path clusterLogs; + private final Path stdoutLogs; + private final Path stdout; + + @Builder + @Getter + static class DatabricksEnvironmentProperties { + private Workspace workspace; + private Cluster cluster; + private Development development; + + @Builder + @Getter + static class Workspace { + private String host; + private String token; + } + + @Builder + @Getter + static class Cluster { + private String sparkVersion; + } - public static String platformVersion() { - return PLATFORM_VERSIONS - .get(DatabricksDynamicParameter.SparkVersion.resolve()) - .replace(".", "_"); + @Builder + @Getter + static class Development { + private String existingClusterId; + private boolean preventClusterTermination; + private String log4jLogsLocation; + private boolean fetchLog4jLogs; + private String stdoutLocation; + private boolean fetchStdout; + private String stderrLocation; + private boolean fetchStderr; + private String eventsFileLocation; + private boolean fetchEvents; + } } - @SneakyThrows - static String init(WorkspaceClient workspace) { - String resolvedClusterId = DatabricksDynamicParameter.ClusterId.resolve(); - boolean attachingToExistingCluster = !"".equals(resolvedClusterId); + DatabricksEnvironment(DatabricksEnvironmentProperties properties) { + log.info("Initializing Databricks environment"); + this.properties = properties; + this.workspace = + new WorkspaceClient( + new DatabricksConfig() + .setHost(properties.getWorkspace().getHost()) + .setToken(properties.getWorkspace().getToken())); + this.dbfs = workspace.dbfs(); - uploadOpenLineageJar(workspace); - uploadInitializationScript(workspace); + uploadOpenLineageJar(); + // Create cluster or connect to an existing one + String resolvedClusterId = properties.getDevelopment().getExistingClusterId(); + boolean attachingToExistingCluster = !"".equals(resolvedClusterId); if (attachingToExistingCluster) { log.info("Attaching to the existing cluster [{}]", resolvedClusterId); /* @@ -95,92 +142,137 @@ static String init(WorkspaceClient workspace) { */ log.warn( "⚠️ The cluster must be restarted to apply changes if the OpenLineage jar has been updated. ⚠️"); - return resolvedClusterId; + this.clusterId = resolvedClusterId; } else { - // We may reuse the cluster name where there are existing old logs. This can happen if the - // tests failed. Here we make sure the logs are clean. 
- Delete deleteClusterLogs = new Delete(); - deleteClusterLogs.setPath(DBFS_CLUSTER_LOGS); - deleteClusterLogs.setRecursive(true); - workspace.dbfs().delete(deleteClusterLogs); - - log.info("Creating a new Databricks cluster."); - String sparkPlatformVersion = getSparkPlatformVersion(); - String clusterName = CLUSTER_NAME + "_" + getSparkPlatformVersion(); - log.debug("Ensuring the cluster with name [{}] doesn't exist.", clusterName); - for (ClusterDetails clusterDetail : workspace.clusters().list(new ListClustersRequest())) { - if (clusterDetail.getClusterName().equals(clusterName)) { - log.info( - "Deleting a cluster [{}] with ID [{}].", - clusterDetail.getClusterName(), - clusterDetail.getClusterId()); - workspace.clusters().permanentDelete(clusterDetail.getClusterId()); - } - } - Wait cluster = - createCluster(workspace, clusterName, sparkPlatformVersion); + this.clusterId = prepareNewCluster(); + } - String clusterId = cluster.getResponse().getClusterId(); - log.info("Ensuring the new cluster [{}] with ID [{}] is running...", clusterName, clusterId); - cluster.get(Duration.ofMinutes(10)); - return clusterId; + this.platformVersion = + PLATFORM_VERSIONS.get(properties.cluster.getSparkVersion()).replace(".", "_"); + this.dbfsEventsFile = "dbfs:/databricks/openlineage/events_" + this.platformVersion + ".log"; + this.clusterLogs = Paths.get(DBFS_CLUSTER_LOGS + "/" + clusterId + "/driver/log4j-active.log"); + this.stdoutLogs = Paths.get(DBFS_CLUSTER_LOGS + "/" + clusterId + "/driver/stdout"); + this.stdout = Paths.get(DBFS_CLUSTER_LOGS + "/" + clusterId + "/driver/stderr"); + } + + @SneakyThrows + private String prepareNewCluster() { + uploadInitializationScript(); + + // We may reuse the cluster name where there are existing old logs. This can happen if the + // tests failed. Here we make sure the logs are clean. 
+ Delete deleteClusterLogs = new Delete(); + deleteClusterLogs.setPath(DBFS_CLUSTER_LOGS); + deleteClusterLogs.setRecursive(true); + dbfs.delete(deleteClusterLogs); + + log.info("Creating a new Databricks cluster."); + String sparkPlatformVersion = getSparkPlatformVersion(); + String clusterName = CLUSTER_NAME + "_" + sparkPlatformVersion; + ensureClusterDoesntExist(clusterName); + Wait cluster = + createCluster(clusterName, sparkPlatformVersion); + + String clusterId = cluster.getResponse().getClusterId(); + log.info("Ensuring the new cluster [{}] with ID [{}] is running...", clusterName, clusterId); + cluster.get(Duration.ofMinutes(10)); + return clusterId; + } + + private void ensureClusterDoesntExist(String clusterName) { + log.debug("Ensuring the cluster with name [{}] doesn't exist.", clusterName); + for (ClusterDetails clusterDetail : workspace.clusters().list(new ListClustersRequest())) { + if (clusterDetail.getClusterName().equals(clusterName)) { + log.info( + "Deleting a cluster [{}] with ID [{}].", + clusterDetail.getClusterName(), + clusterDetail.getClusterId()); + workspace.clusters().permanentDelete(clusterDetail.getClusterId()); + } } } @SneakyThrows - static void shutdown( - WorkspaceClient workspace, - String clusterId, - boolean preventClusterTermination, - boolean existingClusterUsed, - String executionTimestamp) { - // remove events file - workspace.dbfs().delete(DBFS_EVENTS_FILE); - - if (!(preventClusterTermination || existingClusterUsed)) { - // need to terminate cluster to have access to cluster logs + @Override + public void close() { + boolean existingClusterUsed = !"".equals(properties.getDevelopment().getExistingClusterId()); + + log.info("Deleting events file [{}]", dbfsEventsFile); + deleteEventsFile(); + + if (!(properties.getDevelopment().isPreventClusterTermination() || existingClusterUsed)) { + log.info("Terminating cluster [{}].", clusterId); workspace.clusters().delete(clusterId); workspace.clusters().waitGetClusterTerminated(clusterId); } - Path clusterLogs = Paths.get(DBFS_CLUSTER_LOGS + "/" + clusterId + "/driver/log4j-active.log"); - log.info("Waiting for the cluster logs to be available on DBFS under [{}]...", clusterLogs); + log.info("Deleting cluster logs from [{}].", clusterLogs); + dbfs.delete(clusterLogs.toAbsolutePath().toString()); + dbfs.delete(stdoutLogs.toAbsolutePath().toString()); + dbfs.delete(stdout.toAbsolutePath().toString()); + } + + public void deleteEventsFile() { + dbfs.delete(dbfsEventsFile); + } + + /** Fetches driver's stdout, stderr and log4j logs files */ + @SneakyThrows + public void fetchLogs() { + DatabricksEnvironmentProperties.Development development = properties.getDevelopment(); + if (development.isFetchLog4jLogs()) { + log.info("Fetching log4j logs"); + fetchLogs( + clusterLogs, + development.getLog4jLogsLocation() + "/" + executionTimestamp + "-cluster-log4j.log"); + } else { + log.info("Skipping fetching log4j logs."); + } + + if (development.isFetchStdout()) { + log.info("Fetching stdout logs"); + fetchLogs(stdoutLogs, development.getStdoutLocation() + "/" + executionTimestamp + "_stdout"); + } else { + log.info("Skipping fetching stdout logs."); + } + + if (development.isFetchStderr()) { + log.info("Fetching stderr logs"); + fetchLogs(stdout, development.getStderrLocation() + "/" + executionTimestamp + "_stderr"); + } else { + log.info("Skipping fetching stderr logs."); + } + } + + private void fetchLogs(Path databricksLocation, String logsLocation) throws IOException { + log.info("Waiting for the logs to be 
available under [{}]...", databricksLocation); await() .atMost(Duration.ofSeconds(300)) .pollInterval(Duration.ofSeconds(3)) .until( () -> { try { - return workspace.dbfs().getStatus(clusterLogs.toString()) != null; + return dbfs.getStatus(databricksLocation.toString()) != null; } catch (Exception e) { return false; } }); - - // fetch logs and move to local file - String logsLocation = "./build/" + executionTimestamp + "-cluster-log4j.log"; - log.info("Fetching cluster logs to [{}]", logsLocation); - writeLinesToFile( - logsLocation, workspace.dbfs().readAllLines(clusterLogs, StandardCharsets.UTF_8)); + log.info("Fetching logs to [{}].", logsLocation); + writeLinesToFile(logsLocation, dbfs.readAllLines(databricksLocation, StandardCharsets.UTF_8)); log.info("Logs fetched."); - - workspace.dbfs().delete(clusterLogs.toAbsolutePath().toString()); } @SneakyThrows - static List runScript( - WorkspaceClient workspace, String clusterId, String scriptName, String executionTimestamp) { + public List runScript(String scriptName) { // upload scripts String dbfsScriptPath = "dbfs:/databricks/openlineage/scripts/" + scriptName; log.info("Uploading script [{}] to [{}]", scriptName, dbfsScriptPath); String taskName = scriptName.replace(".py", ""); - workspace - .dbfs() - .write( - Paths.get(dbfsScriptPath), - readAllBytes( - Paths.get(Resources.getResource("databricks_notebooks/" + scriptName).getPath()))); + dbfs.write( + Paths.get(dbfsScriptPath), + readAllBytes( + Paths.get(Resources.getResource("databricks_notebooks/" + scriptName).getPath()))); log.info("The script [{}] has been uploaded to [{}].", scriptName, dbfsScriptPath); SparkPythonTask task = new SparkPythonTask(); @@ -203,12 +295,12 @@ static List runScript( submit.get(); log.info("PySpark task [{}] completed.", taskName); - return fetchEventsEmitted(workspace, scriptName, executionTimestamp); + return fetchEventsEmitted(scriptName); } @SneakyThrows - private static Wait createCluster( - WorkspaceClient workspace, String clusterName, String sparkPlatformVersion) { + private Wait createCluster( + String clusterName, String sparkPlatformVersion) { HashMap sparkConf = new HashMap<>(); sparkConf.put("spark.openlineage.facets.debug.disabled", "false"); sparkConf.put("spark.openlineage.transport.type", "file"); @@ -219,6 +311,7 @@ private static Wait createCluster( CreateCluster createCluster = new CreateCluster() .setClusterName(clusterName) + .setDataSecurityMode(DataSecurityMode.SINGLE_USER) .setSparkVersion(sparkPlatformVersion) .setNodeTypeId(NODE_TYPE) .setAutoterminationMinutes(10L) @@ -228,6 +321,7 @@ private static Wait createCluster( new InitScriptInfo() .setWorkspace(new WorkspaceStorageInfo().setDestination(INIT_SCRIPT_FILE)))) .setSparkConf(sparkConf) + .setDataSecurityMode(DataSecurityMode.SINGLE_USER) .setClusterLogConf( new ClusterLogConf() .setDbfs(new DbfsStorageInfo().setDestination(DBFS_CLUSTER_LOGS))); @@ -237,8 +331,8 @@ private static Wait createCluster( return workspace.clusters().create(createCluster); } - private static String getSparkPlatformVersion() { - String sparkVersion = DatabricksDynamicParameter.SparkVersion.resolve(); + private String getSparkPlatformVersion() { + String sparkVersion = properties.cluster.getSparkVersion(); if (!PLATFORM_VERSIONS_NAMES.containsKey(sparkVersion)) { log.error("Unsupported [spark.version] for databricks test: [{}].", sparkVersion); } @@ -253,7 +347,7 @@ private static String getSparkPlatformVersion() { * restart the cluster if you change the jar and want to use it. 
*/ @SneakyThrows - private static void uploadOpenLineageJar(WorkspaceClient workspace) { + private void uploadOpenLineageJar() { Path jarFile = Files.list(Paths.get("../build/libs/")) .filter(p -> p.getFileName().toString().startsWith("openlineage-spark_")) @@ -263,25 +357,24 @@ private static void uploadOpenLineageJar(WorkspaceClient workspace) { // make sure dbfs:/databricks/openlineage/ exists try { - workspace.dbfs().mkdirs("dbfs:/databricks"); + dbfs.mkdirs("dbfs:/databricks"); } catch (RuntimeException e) { } try { - workspace.dbfs().mkdirs("dbfs:/databricks/openlineage/"); + dbfs.mkdirs("dbfs:/databricks/openlineage/"); } catch (RuntimeException e) { } // clear other jars in DBFS - if (workspace.dbfs().list("dbfs:/databricks/openlineage/") != null) { - StreamSupport.stream( - workspace.dbfs().list("dbfs:/databricks/openlineage/").spliterator(), false) + if (dbfs.list("dbfs:/databricks/openlineage/") != null) { + StreamSupport.stream(dbfs.list("dbfs:/databricks/openlineage/").spliterator(), false) .filter(f -> f.getPath().contains("openlineage-spark")) .filter(f -> f.getPath().endsWith(".jar")) - .forEach(f -> workspace.dbfs().delete(f.getPath())); + .forEach(f -> dbfs.delete(f.getPath())); } String destination = "dbfs:/databricks/openlineage/" + jarFile.getFileName(); - uploadFileToDbfs(workspace, jarFile, destination); + uploadFileToDbfs(jarFile, destination); log.info("OpenLineage jar has been uploaded to [{}]", destination); } @@ -291,13 +384,13 @@ private static void uploadOpenLineageJar(WorkspaceClient workspace) { *

The script is used by the clusters to copy OpenLineage jar to the location where it can be * loaded by the driver. */ - private static void uploadInitializationScript(WorkspaceClient workspace) throws IOException { + private void uploadInitializationScript() throws IOException { String string = Resources.toString( Paths.get("../databricks/open-lineage-init-script.sh").toUri().toURL(), StandardCharsets.UTF_8); String encodedString = Base64.getEncoder().encodeToString(string.getBytes()); - workspace + this.workspace .workspace() .importContent( new Import() @@ -308,9 +401,9 @@ private static void uploadInitializationScript(WorkspaceClient workspace) throws } @SneakyThrows - private static void uploadFileToDbfs(WorkspaceClient workspace, Path jarFile, String toLocation) { + private void uploadFileToDbfs(Path jarFile, String toLocation) { FileInputStream fis = new FileInputStream(jarFile.toString()); - OutputStream outputStream = workspace.dbfs().getOutputStream(toLocation); + OutputStream outputStream = dbfs.getOutputStream(toLocation); // upload to DBFS -> 12MB file upload need to go in chunks smaller than 1MB each byte[] buf = new byte[500000]; // approx 0.5MB @@ -324,15 +417,18 @@ private static void uploadFileToDbfs(WorkspaceClient workspace, Path jarFile, St } @SneakyThrows - private static List fetchEventsEmitted( - WorkspaceClient workspace, String scriptName, String executionTimestamp) { - Path path = Paths.get(DBFS_EVENTS_FILE); + private List fetchEventsEmitted(String scriptName) { + Path path = Paths.get(dbfsEventsFile); log.info("Fetching events from [{}]...", path); - List eventsLines = workspace.dbfs().readAllLines(path, StandardCharsets.UTF_8); + List eventsLines = dbfs.readAllLines(path, StandardCharsets.UTF_8); log.info("There are [{}] events.", eventsLines.size()); - saveEventsLocally(scriptName, executionTimestamp, eventsLines); + if (properties.getDevelopment().isFetchEvents()) { + saveEventsLocally(scriptName, eventsLines); + } else { + log.info("Skipping fetching events logs."); + } return eventsLines.stream() .map(OpenLineageClientUtils::runEventFromJson) @@ -340,11 +436,16 @@ private static List fetchEventsEmitted( } /** Downloads the events locally for troubleshooting purposes */ - private static void saveEventsLocally( - String scriptName, String executionTimestamp, List lines) throws IOException { + private void saveEventsLocally(String scriptName, List lines) throws IOException { // The source file path is reused and deleted before every test. As long as the tests are not // executed concurrently, it should contain the events from the current test. 
- String eventsLocation = "./build/" + executionTimestamp + "-" + scriptName + "-events.ndjson"; + String eventsLocation = + properties.getDevelopment().getEventsFileLocation() + + "/" + + executionTimestamp + + "-" + + scriptName + + "-events.ndjson"; log.info("Fetching events to [{}]", eventsLocation); writeLinesToFile(eventsLocation, lines); log.info("Events fetched."); diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksIntegrationTest.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksIntegrationTest.java index 266b370a12..6b41bfd22c 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksIntegrationTest.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksIntegrationTest.java @@ -5,23 +5,14 @@ package io.openlineage.spark.agent; -import static io.openlineage.spark.agent.DatabricksUtils.DBFS_EVENTS_FILE; -import static io.openlineage.spark.agent.DatabricksUtils.init; -import static io.openlineage.spark.agent.DatabricksUtils.platformVersion; -import static io.openlineage.spark.agent.DatabricksUtils.runScript; import static org.assertj.core.api.Assertions.assertThat; -import com.databricks.sdk.WorkspaceClient; -import com.databricks.sdk.core.DatabricksConfig; import io.openlineage.client.OpenLineage.ColumnLineageDatasetFacetFieldsAdditional; import io.openlineage.client.OpenLineage.InputDataset; import io.openlineage.client.OpenLineage.OutputDataset; import io.openlineage.client.OpenLineage.RunEvent; import io.openlineage.client.OpenLineage.RunEvent.EventType; import io.openlineage.client.OpenLineage.RunFacet; -import java.time.ZoneOffset; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; import java.util.List; import java.util.Map; import java.util.Optional; @@ -29,7 +20,6 @@ import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; @@ -46,55 +36,65 @@ @SuppressWarnings("PMD.AvoidDuplicateLiterals") class DatabricksIntegrationTest { - private static WorkspaceClient workspace; - private static String clusterId; - private static final String executionTimestamp = - ZonedDateTime.now(ZoneOffset.UTC).format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss")); - - @BeforeAll - @SneakyThrows - public static void setup() { - DatabricksConfig config = - new DatabricksConfig() - .setHost(DatabricksDynamicParameter.Host.resolve()) - .setToken(DatabricksDynamicParameter.Token.resolve()); - - workspace = new WorkspaceClient(config); - clusterId = init(workspace); - } + private static final DatabricksEnvironment databricks = + new DatabricksEnvironment( + DatabricksEnvironment.DatabricksEnvironmentProperties.builder() + .workspace( + DatabricksEnvironment.DatabricksEnvironmentProperties.Workspace.builder() + .host(DatabricksDynamicParameter.Host.resolve()) + .token(DatabricksDynamicParameter.Token.resolve()) + .build()) + .cluster( + DatabricksEnvironment.DatabricksEnvironmentProperties.Cluster.builder() + .sparkVersion(DatabricksDynamicParameter.SparkVersion.resolve()) + .build()) + .development( + DatabricksEnvironment.DatabricksEnvironmentProperties.Development.builder() + .existingClusterId(DatabricksDynamicParameter.ClusterId.resolve()) + .preventClusterTermination( + Boolean.parseBoolean( + DatabricksDynamicParameter.PreventClusterTermination.resolve())) 
+ .fetchLog4jLogs( + Boolean.parseBoolean(DatabricksDynamicParameter.FetchLog4jLogs.resolve())) + .log4jLogsLocation(DatabricksDynamicParameter.Log4jLogsLocation.resolve()) + .fetchStdout( + Boolean.parseBoolean(DatabricksDynamicParameter.FetchStdout.resolve())) + .stdoutLocation(DatabricksDynamicParameter.StdoutLocation.resolve()) + .fetchStderr( + Boolean.parseBoolean(DatabricksDynamicParameter.FetchStderr.resolve())) + .stderrLocation(DatabricksDynamicParameter.StderrLocation.resolve()) + .fetchEvents( + Boolean.parseBoolean(DatabricksDynamicParameter.FetchEvents.resolve())) + .eventsFileLocation(DatabricksDynamicParameter.EventsFileLocation.resolve()) + .build()) + .build()); + private final String platformVersion = databricks.getPlatformVersion(); @BeforeEach public void beforeEach() { - workspace.dbfs().delete(DBFS_EVENTS_FILE); + databricks.deleteEventsFile(); } @AfterAll public static void shutdown() { - if (clusterId != null) { - boolean existingClusterUsed = "".equals(DatabricksDynamicParameter.ClusterId.resolve()); - DatabricksUtils.shutdown( - workspace, - clusterId, - Boolean.parseBoolean(DatabricksDynamicParameter.PreventClusterTermination.resolve()), - existingClusterUsed, - executionTimestamp); - } + databricks.fetchLogs(); + databricks.close(); } @Test @SneakyThrows void testCreateTableAsSelect() { - List runEvents = runScript(workspace, clusterId, "ctas.py", executionTimestamp); + List runEvents = databricks.runScript("ctas.py"); RunEvent lastEvent = runEvents.get(runEvents.size() - 1); OutputDataset outputDataset = lastEvent.getOutputs().get(0); InputDataset inputDataset = lastEvent.getInputs().get(0); assertThat(outputDataset.getNamespace()).isEqualTo("dbfs"); - assertThat(outputDataset.getName()).isEqualTo("/user/hive/warehouse/ctas_" + platformVersion()); + assertThat(outputDataset.getName()).isEqualTo("/user/hive/warehouse/ctas_" + platformVersion); assertThat(inputDataset.getNamespace()).isEqualTo("dbfs"); - assertThat(inputDataset.getName()).isEqualTo("/user/hive/warehouse/temp_" + platformVersion()); + assertThat(inputDataset.getName()).isEqualTo("/user/hive/warehouse/temp_" + platformVersion); // test DatabricksEnvironmentFacetBuilder handler RunEvent eventWithDatabricksProperties = @@ -135,8 +135,7 @@ void testCreateTableAsSelect() { @Test @SneakyThrows void testNarrowTransformation() { - List runEvents = - runScript(workspace, clusterId, "narrow_transformation.py", executionTimestamp); + List runEvents = databricks.runScript("narrow_transformation.py"); assertThat(runEvents).isNotEmpty(); // assert start event exists @@ -162,14 +161,13 @@ void testNarrowTransformation() { assertThat(completeEvent).isPresent(); assertThat(completeEvent.get().getOutputs().get(0).getName()) - .isEqualTo("/data/path/to/output/narrow_transformation_" + platformVersion()); + .isEqualTo("/data/path/to/output/narrow_transformation_" + platformVersion); } @Test @SneakyThrows void testWideTransformation() { - List runEvents = - runScript(workspace, clusterId, "wide_transformation.py", executionTimestamp); + List runEvents = databricks.runScript("wide_transformation.py"); assertThat(runEvents).isNotEmpty(); // assert start event exists @@ -189,13 +187,12 @@ void testWideTransformation() { assertThat(completeEvent).isPresent(); assertThat(completeEvent.get().getOutputs().get(0).getName()) - .isEqualTo("/data/output/wide_transformation/result_" + platformVersion()); + .isEqualTo("/data/output/wide_transformation/result_" + platformVersion); } @Test void testWriteReadFromTableWithLocation() 
{ - List runEvents = - runScript(workspace, clusterId, "dataset_names.py", executionTimestamp); + List runEvents = databricks.runScript("dataset_names.py"); // find complete event with output dataset containing name OutputDataset outputDataset = @@ -224,7 +221,7 @@ void testWriteReadFromTableWithLocation() { @Test @SneakyThrows void testMergeInto() { - List runEvents = runScript(workspace, clusterId, "merge_into.py", executionTimestamp); + List runEvents = databricks.runScript("merge_into.py"); RunEvent event = runEvents.stream() @@ -244,41 +241,41 @@ void testMergeInto() { .getAdditionalProperties(); assertThat(event.getOutputs()).hasSize(1); - assertThat(event.getOutputs().get(0).getName()).endsWith("events_" + platformVersion()); + assertThat(event.getOutputs().get(0).getName()).endsWith("events_" + platformVersion); assertThat(event.getInputs()).hasSize(2); assertThat(event.getInputs().stream().map(d -> d.getName()).collect(Collectors.toList())) .containsExactlyInAnyOrder( - "/user/hive/warehouse/test_db.db/updates_" + platformVersion(), - "/user/hive/warehouse/test_db.db/events_" + platformVersion()); + "/user/hive/warehouse/test_db.db/updates_" + platformVersion, + "/user/hive/warehouse/test_db.db/events_" + platformVersion); assertThat(fields).hasSize(2); assertThat(fields.get("last_updated_at").getInputFields()).hasSize(1); assertThat(fields.get("last_updated_at").getInputFields().get(0)) .hasFieldOrPropertyWithValue("namespace", "dbfs") .hasFieldOrPropertyWithValue( - "name", "/user/hive/warehouse/test_db.db/updates_" + platformVersion()) + "name", "/user/hive/warehouse/test_db.db/updates_" + platformVersion) .hasFieldOrPropertyWithValue("field", "updated_at"); assertThat(fields.get("event_id").getInputFields()).hasSize(2); assertThat( fields.get("event_id").getInputFields().stream() - .filter(e -> e.getName().endsWith("updates_" + platformVersion())) + .filter(e -> e.getName().endsWith("updates_" + platformVersion)) .findFirst() .get()) .hasFieldOrPropertyWithValue("namespace", "dbfs") .hasFieldOrPropertyWithValue( - "name", "/user/hive/warehouse/test_db.db/updates_" + platformVersion()) + "name", "/user/hive/warehouse/test_db.db/updates_" + platformVersion) .hasFieldOrPropertyWithValue("field", "event_id"); assertThat( fields.get("event_id").getInputFields().stream() - .filter(e -> e.getName().endsWith("events_" + platformVersion())) + .filter(e -> e.getName().endsWith("events_" + platformVersion)) .findFirst() .get()) .hasFieldOrPropertyWithValue("namespace", "dbfs") .hasFieldOrPropertyWithValue( - "name", "/user/hive/warehouse/test_db.db/events_" + platformVersion()) + "name", "/user/hive/warehouse/test_db.db/events_" + platformVersion) .hasFieldOrPropertyWithValue("field", "event_id"); } } From 50ba05b3466ffa082e50ea3ff645e154e489195e Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Wed, 30 Oct 2024 09:23:15 +0100 Subject: [PATCH 35/89] removal: Deprecated column level visitors (#3198) * Deprecated custom column level visitors have been removed Signed-off-by: Artur Owczarek --- .../column/CustomColumnLineageVisitor.java | 42 ---------- .../plan/column/CustomCollectorsUtils.java | 11 +-- .../LegacyColumnLineageVisitorsLoader.java | 63 -------------- .../CustomColumnLineageVisitorTestImpl.java | 44 ---------- .../column/CustomCollectorsUtilsTest.java | 82 ------------------- ...cle.plan.column.CustomColumnLineageVisitor | 1 - .../spark/spark_column_lineage.md | 43 ---------- 7 files changed, 1 insertion(+), 285 deletions(-) delete mode 100644 
integration/spark/shared/src/main/java/io/openlineage/spark/agent/lifecycle/plan/column/CustomColumnLineageVisitor.java delete mode 100644 integration/spark/spark3/src/main/java/io/openlineage/spark3/agent/lifecycle/plan/column/LegacyColumnLineageVisitorsLoader.java delete mode 100644 integration/spark/spark3/src/test/java/io/openlineage/spark3/agent/lifecycle/plan/CustomColumnLineageVisitorTestImpl.java delete mode 100644 integration/spark/spark3/src/test/java/io/openlineage/spark3/agent/lifecycle/plan/column/CustomCollectorsUtilsTest.java delete mode 100644 integration/spark/spark3/src/test/resources/META-INF/services/io.openlineage.spark.agent.lifecycle.plan.column.CustomColumnLineageVisitor diff --git a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/lifecycle/plan/column/CustomColumnLineageVisitor.java b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/lifecycle/plan/column/CustomColumnLineageVisitor.java deleted file mode 100644 index 9b2ffcbc8b..0000000000 --- a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/lifecycle/plan/column/CustomColumnLineageVisitor.java +++ /dev/null @@ -1,42 +0,0 @@ -/* -/* Copyright 2018-2024 contributors to the OpenLineage project -/* SPDX-License-Identifier: Apache-2.0 -*/ - -package io.openlineage.spark.agent.lifecycle.plan.column; - -import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan; - -/** Empty interface left for existing custom visitor implementations. */ -@Deprecated -public interface CustomColumnLineageVisitor { - /** - * Collect inputs for a given {@link LogicalPlan}. Column level lineage mechanism traverses - * LogicalPlan on its node. This method will be called for each traversed node. Input information - * should be put into builder. - * - * @param node - * @param builder - */ - void collectInputs(LogicalPlan node, ColumnLevelLineageBuilder builder); - - /** - * Collect outputs for a given {@link LogicalPlan}. Column level lineage mechanism traverses - * LogicalPlan on its node. This method will be called for each traversed node. Output information - * should be put into builder. - * - * @param node - * @param builder - */ - void collectOutputs(LogicalPlan node, ColumnLevelLineageBuilder builder); - - /** - * Collect expressions for a given {@link LogicalPlan}. Column level lineage mechanism traverses - * LogicalPlan on its node. This method will be called for each traversed node. Expression - * dependency information should be put into builder. 
- * - * @param node - * @param builder - */ - void collectExpressionDependencies(LogicalPlan node, ColumnLevelLineageBuilder builder); -} diff --git a/integration/spark/spark3/src/main/java/io/openlineage/spark3/agent/lifecycle/plan/column/CustomCollectorsUtils.java b/integration/spark/spark3/src/main/java/io/openlineage/spark3/agent/lifecycle/plan/column/CustomCollectorsUtils.java index 4cd8a8852d..bb7e1e3009 100644 --- a/integration/spark/spark3/src/main/java/io/openlineage/spark3/agent/lifecycle/plan/column/CustomCollectorsUtils.java +++ b/integration/spark/spark3/src/main/java/io/openlineage/spark3/agent/lifecycle/plan/column/CustomCollectorsUtils.java @@ -8,8 +8,6 @@ import io.openlineage.spark.agent.lifecycle.plan.column.ColumnLevelLineageContext; import io.openlineage.spark.agent.lifecycle.plan.column.ColumnLevelLineageVisitor; import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; import lombok.extern.slf4j.Slf4j; import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan; @@ -39,13 +37,6 @@ static void collectExpressionDependencies(ColumnLevelLineageContext context, Log * visitors based on the Spark version and the libraries in use. */ private static List getCollectors(ColumnLevelLineageContext context) { - return concatLists( - LegacyColumnLineageVisitorsLoader.getVisitors(), - context.getOlContext().getColumnLevelLineageVisitors()); - } - - private static List concatLists( - List list1, List list2) { - return Stream.concat(list1.stream(), list2.stream()).collect(Collectors.toList()); + return context.getOlContext().getColumnLevelLineageVisitors(); } } diff --git a/integration/spark/spark3/src/main/java/io/openlineage/spark3/agent/lifecycle/plan/column/LegacyColumnLineageVisitorsLoader.java b/integration/spark/spark3/src/main/java/io/openlineage/spark3/agent/lifecycle/plan/column/LegacyColumnLineageVisitorsLoader.java deleted file mode 100644 index 7437dc9853..0000000000 --- a/integration/spark/spark3/src/main/java/io/openlineage/spark3/agent/lifecycle/plan/column/LegacyColumnLineageVisitorsLoader.java +++ /dev/null @@ -1,63 +0,0 @@ -/* -/* Copyright 2018-2024 contributors to the OpenLineage project -/* SPDX-License-Identifier: Apache-2.0 -*/ -package io.openlineage.spark3.agent.lifecycle.plan.column; - -import io.openlineage.spark.agent.lifecycle.plan.column.ColumnLevelLineageContext; -import io.openlineage.spark.agent.lifecycle.plan.column.ColumnLevelLineageVisitor; -import io.openlineage.spark.agent.lifecycle.plan.column.CustomColumnLineageVisitor; -import java.util.List; -import java.util.ServiceLoader; -import java.util.Spliterator; -import java.util.Spliterators; -import java.util.stream.Collectors; -import java.util.stream.StreamSupport; -import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan; - -/** - * Utility class responsible for loading deprecated `CustomColumnLineageVisitor` implementations and - * converting them to the new `ColumnLevelLineageVisitor` interface. - */ -public class LegacyColumnLineageVisitorsLoader { - - /** - * Loads the deprecated `CustomColumnLineageVisitor` implementations using the ServiceLoader and - * converts them to `ColumnLevelLineageVisitor` instances. - * - * @return a list of `ColumnLevelLineageVisitor` instances converted from the deprecated visitors. 
- */ - static List getVisitors() { - ServiceLoader loader = - ServiceLoader.load(CustomColumnLineageVisitor.class); - - return StreamSupport.stream( - Spliterators.spliteratorUnknownSize( - loader.iterator(), Spliterator.IMMUTABLE & Spliterator.DISTINCT), - false) - .map(LegacyColumnLineageVisitorsLoader::fromLegacyInterface) - .collect(Collectors.toList()); - } - - /** Converts a deprecated `CustomColumnLineageVisitor` to a `ColumnLevelLineageVisitor`. */ - private static ColumnLevelLineageVisitor fromLegacyInterface( - CustomColumnLineageVisitor customVisitor) { - return new ColumnLevelLineageVisitor() { - @Override - public void collectInputs(ColumnLevelLineageContext context, LogicalPlan node) { - customVisitor.collectInputs(node, context.getBuilder()); - } - - @Override - public void collectOutputs(ColumnLevelLineageContext context, LogicalPlan node) { - customVisitor.collectOutputs(node, context.getBuilder()); - } - - @Override - public void collectExpressionDependencies( - ColumnLevelLineageContext context, LogicalPlan node) { - customVisitor.collectExpressionDependencies(node, context.getBuilder()); - } - }; - } -} diff --git a/integration/spark/spark3/src/test/java/io/openlineage/spark3/agent/lifecycle/plan/CustomColumnLineageVisitorTestImpl.java b/integration/spark/spark3/src/test/java/io/openlineage/spark3/agent/lifecycle/plan/CustomColumnLineageVisitorTestImpl.java deleted file mode 100644 index 5a0530de8b..0000000000 --- a/integration/spark/spark3/src/test/java/io/openlineage/spark3/agent/lifecycle/plan/CustomColumnLineageVisitorTestImpl.java +++ /dev/null @@ -1,44 +0,0 @@ -/* -/* Copyright 2018-2024 contributors to the OpenLineage project -/* SPDX-License-Identifier: Apache-2.0 -*/ - -package io.openlineage.spark3.agent.lifecycle.plan; - -import static org.mockito.Mockito.mock; - -import io.openlineage.client.utils.DatasetIdentifier; -import io.openlineage.spark.agent.lifecycle.plan.column.ColumnLevelLineageBuilder; -import io.openlineage.spark.agent.lifecycle.plan.column.CustomColumnLineageVisitor; -import org.apache.spark.sql.catalyst.expressions.ExprId; -import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan; - -public class CustomColumnLineageVisitorTestImpl implements CustomColumnLineageVisitor { - public static final String OUTPUT_COL_NAME = "outputCol"; - public static final String INPUT_COL_NAME = "inputCol"; - public static LogicalPlan child = mock(LogicalPlan.class); - - public static ExprId childExprId = mock(ExprId.class); - public static ExprId parentExprId = mock(ExprId.class); - - @Override - public void collectInputs(LogicalPlan node, ColumnLevelLineageBuilder builder) { - if (node.equals(child)) { - builder.addInput(childExprId, mock(DatasetIdentifier.class), INPUT_COL_NAME); - } - } - - @Override - public void collectOutputs(LogicalPlan node, ColumnLevelLineageBuilder builder) { - if (node.equals(child)) { - builder.addOutput(parentExprId, OUTPUT_COL_NAME); - } - } - - @Override - public void collectExpressionDependencies(LogicalPlan node, ColumnLevelLineageBuilder builder) { - if (node.equals(child)) { - builder.addDependency(parentExprId, childExprId); - } - } -} diff --git a/integration/spark/spark3/src/test/java/io/openlineage/spark3/agent/lifecycle/plan/column/CustomCollectorsUtilsTest.java b/integration/spark/spark3/src/test/java/io/openlineage/spark3/agent/lifecycle/plan/column/CustomCollectorsUtilsTest.java deleted file mode 100644 index 6cb01341eb..0000000000 --- 
a/integration/spark/spark3/src/test/java/io/openlineage/spark3/agent/lifecycle/plan/column/CustomCollectorsUtilsTest.java +++ /dev/null @@ -1,82 +0,0 @@ -/* -/* Copyright 2018-2024 contributors to the OpenLineage project -/* SPDX-License-Identifier: Apache-2.0 -*/ - -package io.openlineage.spark3.agent.lifecycle.plan.column; - -import static io.openlineage.spark3.agent.lifecycle.plan.CustomColumnLineageVisitorTestImpl.INPUT_COL_NAME; -import static io.openlineage.spark3.agent.lifecycle.plan.CustomColumnLineageVisitorTestImpl.OUTPUT_COL_NAME; -import static io.openlineage.spark3.agent.lifecycle.plan.CustomColumnLineageVisitorTestImpl.child; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import io.openlineage.client.OpenLineage; -import io.openlineage.spark.agent.util.ScalaConversionUtils; -import io.openlineage.spark.api.OpenLineageContext; -import io.openlineage.spark.api.SparkOpenLineageConfig; -import java.net.URI; -import java.util.Arrays; -import java.util.Optional; -import lombok.SneakyThrows; -import lombok.extern.slf4j.Slf4j; -import org.apache.spark.scheduler.SparkListenerEvent; -import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan; -import org.apache.spark.sql.execution.QueryExecution; -import org.junit.jupiter.api.Test; -import org.mockito.Mockito; - -@Slf4j -class CustomCollectorsUtilsTest { - - static LogicalPlan plan = mock(LogicalPlan.class); - OpenLineageContext context = mock(OpenLineageContext.class); - QueryExecution queryExecution = mock(QueryExecution.class); - - @Test - @SneakyThrows - void testCustomCollectorsAreApplied() { - OpenLineage openLineage = new OpenLineage(new URI("some-url")); - when(plan.children()) - .thenReturn( - scala.collection.JavaConverters.collectionAsScalaIterableConverter(Arrays.asList(child)) - .asScala() - .toSeq()); - when(context.getQueryExecution()).thenReturn(Optional.of(queryExecution)); - when(queryExecution.optimizedPlan()).thenReturn(plan); - when(child.output()).thenReturn(ScalaConversionUtils.asScalaSeqEmpty()); - when(plan.output()).thenReturn(ScalaConversionUtils.asScalaSeqEmpty()); - when(child.children()).thenReturn(ScalaConversionUtils.asScalaSeqEmpty()); - when(context.getOpenLineage()).thenReturn(openLineage); - when(context.getOpenLineageConfig()).thenReturn(new SparkOpenLineageConfig()); - - Mockito.doCallRealMethod().when(plan).foreach(any()); - Mockito.doCallRealMethod().when(child).foreach(any()); - - OpenLineage.SchemaDatasetFacet outputSchema = - openLineage.newSchemaDatasetFacet( - Arrays.asList( - openLineage - .newSchemaDatasetFacetFieldsBuilder() - .name(OUTPUT_COL_NAME) - .type("string") - .build())); - - OpenLineage.ColumnLineageDatasetFacet facet = - ColumnLevelLineageUtils.buildColumnLineageDatasetFacet( - mock(SparkListenerEvent.class), context, outputSchema) - .get(); - - assertEquals( - INPUT_COL_NAME, - facet - .getFields() - .getAdditionalProperties() - .get(OUTPUT_COL_NAME) - .getInputFields() - .get(0) - .getField()); - } -} diff --git a/integration/spark/spark3/src/test/resources/META-INF/services/io.openlineage.spark.agent.lifecycle.plan.column.CustomColumnLineageVisitor b/integration/spark/spark3/src/test/resources/META-INF/services/io.openlineage.spark.agent.lifecycle.plan.column.CustomColumnLineageVisitor deleted file mode 100644 index 71d006da8a..0000000000 --- 
a/integration/spark/spark3/src/test/resources/META-INF/services/io.openlineage.spark.agent.lifecycle.plan.column.CustomColumnLineageVisitor +++ /dev/null @@ -1 +0,0 @@ -io.openlineage.spark3.agent.lifecycle.plan.CustomColumnLineageVisitorTestImpl \ No newline at end of file diff --git a/website/docs/integrations/spark/spark_column_lineage.md b/website/docs/integrations/spark/spark_column_lineage.md index 05b811ff66..5e53a76282 100644 --- a/website/docs/integrations/spark/spark_column_lineage.md +++ b/website/docs/integrations/spark/spark_column_lineage.md @@ -92,46 +92,3 @@ To unravel two dependencies implement following logic: The inputs are also mapped for all dataset dependencies. The result is added to each output. Finally, the list of outputs with all their inputs is mapped to `ColumnLineageDatasetFacetFields` object. - -## Writing custom extensions - -Spark framework is known for its great ability to be extended by custom libraries capable of reading or writing to anything. In case of having a custom implementation, we prepared an ability to extend column-level lineage implementation to be able to retrieve information from other input or output LogicalPlan nodes. - -Creating such an extension requires implementing a following interface: - -``` -/** Interface for implementing custom collectors of column-level lineage. */ -interface CustomColumnLineageVisitor { - - /** - * Collect inputs for a given {@link LogicalPlan}. Column-level lineage mechanism traverses - * LogicalPlan on its node. This method will be called for each traversed node. Input information - * should be put into builder. - * - * @param node - * @param builder - */ - void collectInputs(LogicalPlan node, ColumnLevelLineageBuilder builder); - - /** - * Collect outputs for a given {@link LogicalPlan}. Column-level lineage mechanism traverses - * LogicalPlan on its node. This method will be called for each traversed node. Output information - * should be put into builder. - * - * @param node - * @param builder - */ - void collectOutputs(LogicalPlan node, ColumnLevelLineageBuilder builder); - - /** - * Collect expressions for a given {@link LogicalPlan}. Column-level lineage mechanism traverses - * LogicalPlan on its node. This method will be called for each traversed node. Expression - * dependency information should be put into builder. - * - * @param node - * @param builder - */ - void collectExpressionDependencies(LogicalPlan node, ColumnLevelLineageBuilder builder); -} -``` -and making it available for Service Loader (implementation class name has to be put in a resource file `META-INF/services/io.openlineage.spark.agent.lifecycle.plan.column.CustomColumnLineageVisitor`). 
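Note on the removal in the patch above: with the deprecated `CustomColumnLineageVisitor` service-loader path gone, custom column-level lineage collectors reach the Spark integration only through the visitors exposed by `ColumnLevelLineageContext#getOlContext().getColumnLevelLineageVisitors()` (see the updated `CustomCollectorsUtils`); the old `META-INF/services/io.openlineage.spark.agent.lifecycle.plan.column.CustomColumnLineageVisitor` registration file is no longer read. For orientation, a minimal sketch of a visitor written against the retained `ColumnLevelLineageVisitor` interface follows; the package, class name, and the commented builder calls are illustrative assumptions and are not part of this patch.

```
// Minimal sketch of a collector using the non-deprecated ColumnLevelLineageVisitor interface.
// The package, class name, and the example arguments in the comments are hypothetical;
// only the interface, method signatures, and builder methods come from the code removed above.
package com.example.lineage;

import io.openlineage.spark.agent.lifecycle.plan.column.ColumnLevelLineageContext;
import io.openlineage.spark.agent.lifecycle.plan.column.ColumnLevelLineageVisitor;
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan;

public class MyColumnLineageVisitor implements ColumnLevelLineageVisitor {

  @Override
  public void collectInputs(ColumnLevelLineageContext context, LogicalPlan node) {
    // Recognize the custom relation node and register its input columns, e.g.:
    // context.getBuilder().addInput(exprId, datasetIdentifier, "inputCol");
  }

  @Override
  public void collectOutputs(ColumnLevelLineageContext context, LogicalPlan node) {
    // Register the output columns produced by the node, e.g.:
    // context.getBuilder().addOutput(exprId, "outputCol");
  }

  @Override
  public void collectExpressionDependencies(ColumnLevelLineageContext context, LogicalPlan node) {
    // Link output expressions to the input expressions they depend on, e.g.:
    // context.getBuilder().addDependency(outputExprId, inputExprId);
  }
}
```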
From b2ccfae2128dea4ebf3ca29061455ddb71966eee Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Wed, 30 Oct 2024 13:03:40 +0100 Subject: [PATCH 36/89] Move Spark release to the machine type common to other Java release jobs, set version using common method (#3200) Signed-off-by: Maciej Obuchowski --- .circleci/continue_config.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.circleci/continue_config.yml b/.circleci/continue_config.yml index a3ab9fc5be..da5781b790 100644 --- a/.circleci/continue_config.yml +++ b/.circleci/continue_config.yml @@ -557,8 +557,8 @@ jobs: release-integration-spark: working_directory: ~/openlineage/integration/spark - docker: - - image: cimg/openjdk:17.0 + machine: + image: ubuntu-2404:current steps: - *checkout_project_root - run: @@ -569,6 +569,7 @@ jobs: - v1-release-client-java-{{ checksum "/tmp/checksum.txt" }} - attach_workspace: at: ~/ + - set_java_version - run: | # Get, then decode the GPG private key used to sign *.jar export ORG_GRADLE_PROJECT_signingKey=$(echo $GPG_SIGNING_KEY | base64 -d) @@ -576,8 +577,8 @@ jobs: export RELEASE_USERNAME=$(echo $OSSRH_TOKEN_USERNAME) # Publish *.jar - ./gradlew --no-daemon --console=plain clean publishToSonatype closeAndReleaseSonatypeStagingRepository --info -Pscala.binary.version=2.12 -Pjava.compile.home=/usr/local/jdk-17.0.11 - ./gradlew --no-daemon --console=plain clean publishToSonatype closeAndReleaseSonatypeStagingRepository --info -Pscala.binary.version=2.13 -Pjava.compile.home=/usr/local/jdk-17.0.11 + ./gradlew --no-daemon --console=plain clean publishToSonatype closeAndReleaseSonatypeStagingRepository --info -Pscala.binary.version=2.12 -Pjava.compile.home=${JAVA17_HOME} + ./gradlew --no-daemon --console=plain clean publishToSonatype closeAndReleaseSonatypeStagingRepository --info -Pscala.binary.version=2.13 -Pjava.compile.home=${JAVA17_HOME} - store_artifacts: path: ./build/libs destination: spark-client-artifacts From 0e98c8c65da715525a6fe9821e6d2dec19f4d9cd Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Wed, 30 Oct 2024 13:51:23 +0100 Subject: [PATCH 37/89] Add JobTypeJobFacets to dbt integration's model, test, snapshot events (#3199) Signed-off-by: Maciej Obuchowski --- .../common/provider/dbt/processor.py | 23 ++++++++++++++++-- .../common/tests/dbt/build/result.json | 24 ++++++++++++++++--- .../common/tests/dbt/large/result.json | 5 ++++ .../common/tests/dbt/small/result.json | 5 ++++ .../common/tests/dbt/snapshot/result.json | 5 ++++ 5 files changed, 57 insertions(+), 5 deletions(-) diff --git a/integration/common/openlineage/common/provider/dbt/processor.py b/integration/common/openlineage/common/provider/dbt/processor.py index c4955e06d2..6d24e39cae 100644 --- a/integration/common/openlineage/common/provider/dbt/processor.py +++ b/integration/common/openlineage/common/provider/dbt/processor.py @@ -19,6 +19,7 @@ data_quality_assertions_dataset, datasource_dataset, documentation_dataset, + job_type_job, output_statistics_output_dataset, parent_run, schema_dataset, @@ -259,12 +260,14 @@ def parse_execution(self, context: DbtRunContext, nodes: Dict) -> DbtEvents: run_id = str(generate_new_uuid()) if name.startswith("snapshot."): + jobType = "SNAPSHOT" job_name = ( f"{output_node['database']}.{output_node['schema']}" f".{self.removeprefix(run['unique_id'], 'snapshot.')}" + (".build.snapshot" if self.command == "build" else ".snapshot") ) else: + jobType = "MODEL" job_name = ( f"{output_node['database']}.{output_node['schema']}" 
f".{self.removeprefix(run['unique_id'], 'model.')}" @@ -276,7 +279,14 @@ def parse_execution(self, context: DbtRunContext, nodes: Dict) -> DbtEvents: else: sql = output_node["compiled_sql"] - job_facets: Dict[str, JobFacet] = {} + job_facets: Dict[str, JobFacet] = { + "jobType": job_type_job.JobTypeJobFacet( + jobType=jobType, + integration="DBT", + processingType="BATCH", + producer=self.producer, + ) + } if sql: job_facets["sql"] = sql_job.SQLJobFacet(sql) @@ -327,6 +337,15 @@ def parse_test(self, context: DbtRunContext, nodes: Dict) -> DbtEvents: + (".build.test" if self.command == "build" else ".test") ) + job_facets: Dict[str, JobFacet] = { + "jobType": job_type_job.JobTypeJobFacet( + jobType="TEST", + integration="DBT", + processingType="BATCH", + producer=self.producer, + ) + } + run_id = str(generate_new_uuid()) dataset_facets: Dict[str, InputDatasetFacet] = {"dataQualityAssertions": assertion_facet} events.add( @@ -335,7 +354,7 @@ def parse_test(self, context: DbtRunContext, nodes: Dict) -> DbtEvents: started_at, completed_at, self.get_run(run_id), - Job(self.job_namespace, job_name), + Job(namespace=self.job_namespace, name=job_name, facets=job_facets), [ InputDataset( namespace=namespace, diff --git a/integration/common/tests/dbt/build/result.json b/integration/common/tests/dbt/build/result.json index d101396940..9fa215c917 100644 --- a/integration/common/tests/dbt/build/result.json +++ b/integration/common/tests/dbt/build/result.json @@ -43,7 +43,13 @@ } ], "job": { - "facets": {}, + "facets": { + "jobType": { + "jobType": "MODEL", + "integration": "DBT", + "processingType": "BATCH" + } + }, "name": "random-gcp-project.dbt_test1.dbt_bigquery_test.test_first_dbt_model.build.run", "namespace": "job-namespace" }, @@ -77,7 +83,13 @@ } ], "job": { - "facets": {}, + "facets": { + "jobType": { + "jobType": "SNAPSHOT", + "integration": "DBT", + "processingType": "BATCH" + } + }, "name": "random-gcp-project.dbt_test1.dbt_bigquery_test.orders_snapshot.build.snapshot", "namespace": "job-namespace" }, @@ -154,7 +166,13 @@ } ], "job": { - "facets": {}, + "facets": { + "jobType": { + "jobType": "TEST", + "integration": "DBT", + "processingType": "BATCH" + } + }, "name": "random-gcp-project.dbt_test1.dbt_bigquery_test.test_first_dbt_model.build.test", "namespace": "job-namespace" }, diff --git a/integration/common/tests/dbt/large/result.json b/integration/common/tests/dbt/large/result.json index cea2d87bdd..02380a89b2 100644 --- a/integration/common/tests/dbt/large/result.json +++ b/integration/common/tests/dbt/large/result.json @@ -358,6 +358,11 @@ "facets": { "sql": { "query": "with customers as (\n\n select * from DEMO_DB.public.stg_customers\n\n),\n\norders as (\n\n select * from DEMO_DB.public.stg_orders\n\n),\n\npayments as (\n\n select * from DEMO_DB.public.stg_payments\n\n),\n\ncustomer_orders as (\n\n select\n customer_id,\n\n min(order_date) as first_order,\n max(order_date) as most_recent_order,\n count(order_id) as number_of_orders\n from orders\n\n group by 1\n\n),\n\ncustomer_payments as (\n\n select\n orders.customer_id,\n sum(amount) as total_amount\n\n from payments\n\n left join orders using (order_id)\n\n group by 1\n\n),\n\nfinal as (\n\n select\n customers.customer_id,\n customers.first_name,\n customers.last_name,\n customer_orders.first_order,\n customer_orders.most_recent_order,\n customer_orders.number_of_orders,\n customer_payments.total_amount as customer_lifetime_value\n\n from customers\n\n left join customer_orders using (customer_id)\n\n left join 
customer_payments using (customer_id)\n\n)\n\nselect * from final" + }, + "jobType": { + "jobType": "MODEL", + "integration": "DBT", + "processingType": "BATCH" } } }, diff --git a/integration/common/tests/dbt/small/result.json b/integration/common/tests/dbt/small/result.json index 25fcd81e4f..9113cee93c 100644 --- a/integration/common/tests/dbt/small/result.json +++ b/integration/common/tests/dbt/small/result.json @@ -37,6 +37,11 @@ "facets": { "sql": { "query": "select *\nfrom `random-gcp-project`.`dbt_test1`.`source_table`\nwhere id = 1" + }, + "jobType": { + "jobType": "MODEL", + "integration": "DBT", + "processingType": "BATCH" } } }, diff --git a/integration/common/tests/dbt/snapshot/result.json b/integration/common/tests/dbt/snapshot/result.json index 42bd662f03..985bd8f6df 100644 --- a/integration/common/tests/dbt/snapshot/result.json +++ b/integration/common/tests/dbt/snapshot/result.json @@ -102,6 +102,11 @@ "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/SQLJobFacet.json#/$defs/SQLJobFacet", "query": "\n\n\n\nselect * from \"postgres\".\"postgres\".\"my_second_dbt_model\"\n" + }, + "jobType": { + "jobType": "SNAPSHOT", + "integration": "DBT", + "processingType": "BATCH" } }, "name": "postgres.postgres.snapshot_test.orders_snapshot.snapshot", From c7a88edd2dbf58597fac9dd8afbfc73095d2d553 Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Thu, 31 Oct 2024 16:11:00 +0100 Subject: [PATCH 38/89] test: Exit early when the Databricks integration test has unexpected version of the Spark passed. (#3206) Signed-off-by: Artur Owczarek --- .../spark/agent/DatabricksEnvironment.java | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksEnvironment.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksEnvironment.java index 460fa9945b..5552a1b70d 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksEnvironment.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksEnvironment.java @@ -332,13 +332,18 @@ private Wait createCluster( } private String getSparkPlatformVersion() { - String sparkVersion = properties.cluster.getSparkVersion(); + String sparkVersion = properties.getCluster().getSparkVersion(); if (!PLATFORM_VERSIONS_NAMES.containsKey(sparkVersion)) { - log.error("Unsupported [spark.version] for databricks test: [{}].", sparkVersion); + log.error( + "Unsupported [spark.version] for Databricks test: [{}]. 
Supported versions are {}", + sparkVersion, + PLATFORM_VERSIONS_NAMES.keySet()); + throw new IllegalStateException("Unsupported [spark.version] for Databricks"); } - log.info("Databricks version: [{}].", PLATFORM_VERSIONS_NAMES.get(sparkVersion)); - return PLATFORM_VERSIONS_NAMES.get(sparkVersion); + String platformVersion = PLATFORM_VERSIONS_NAMES.get(sparkVersion); + log.info("Databricks version: [{}].", platformVersion); + return platformVersion; } /** From 737229a2568f2919ab1f7cf6e5f30879a48d180d Mon Sep 17 00:00:00 2001 From: Massy Bourennani Date: Fri, 1 Nov 2024 12:39:24 +0100 Subject: [PATCH 39/89] [Fix][Integration/dbt] Parse dbt source tests (#3208) * Fix: Consider dbt sources when looking for test results Signed-off-by: Massy Bourennani * format Signed-off-by: Massy Bourennani --- .../common/provider/dbt/processor.py | 3 +- integration/common/tests/dbt/test/result.json | 61 ++++++++ .../tests/dbt/test/target/manifest.json | 146 +++++++++++++++++- .../tests/dbt/test/target/run_results.json | 21 +++ .../common/tests/dbt/test_dbt_local.py | 1 + 5 files changed, 230 insertions(+), 2 deletions(-) diff --git a/integration/common/openlineage/common/provider/dbt/processor.py b/integration/common/openlineage/common/provider/dbt/processor.py index 6d24e39cae..7db270ceb3 100644 --- a/integration/common/openlineage/common/provider/dbt/processor.py +++ b/integration/common/openlineage/common/provider/dbt/processor.py @@ -317,7 +317,8 @@ def parse_test(self, context: DbtRunContext, nodes: Dict) -> DbtEvents: assertions = self.parse_assertions(context, nodes) events = DbtEvents() - for name, node in context.manifest["nodes"].items(): + manifest_nodes = {**context.manifest["nodes"], **context.manifest["sources"]} + for name, node in manifest_nodes.items(): if not name.startswith("model.") and not name.startswith("source."): continue if len(assertions[name]) == 0: diff --git a/integration/common/tests/dbt/test/result.json b/integration/common/tests/dbt/test/result.json index 484f3a004c..3d5a141d5a 100644 --- a/integration/common/tests/dbt/test/result.json +++ b/integration/common/tests/dbt/test/result.json @@ -75,6 +75,25 @@ "runId": "c11f2efd-4415-45fc-8081-10d2aaa594d2" } }, + { + "eventTime": "2021-08-25T11:00:25.277467+00:00", + "eventType": "START", + "inputs": [ + { + "name": "random-gcp-project.dbt_test2.source_table", + "namespace": "bigquery" + } + ], + "job": { + "name": "random-gcp-project.dbt_test2.source.dbt_bigquery_test.dbt_test2.source_table.test", + "namespace": "dbt-test-namespace" + }, + "outputs": [], + "producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "run": { + "runId": "b901441a-7b4a-4a97-aa61-a200106b3ce3" + } + }, { "eventTime": "2021-08-25T11:00:25.277467+00:00", "eventType": "COMPLETE", @@ -292,5 +311,47 @@ }, "runId": "c11f2efd-4415-45fc-8081-10d2aaa594d2" } + }, + { + "eventTime": "2021-08-25T11:00:25.277467+00:00", + "eventType": "COMPLETE", + "inputs": [ + { + "inputFacets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "column": "id", + "assertion": "not_null", + "success": true + } + ] + } + }, + "name": "random-gcp-project.dbt_test2.source_table", + "namespace": "bigquery" + } + ], + "job": { + "facets": {}, + "name": 
"random-gcp-project.dbt_test2.source.dbt_bigquery_test.dbt_test2.source_table.test", + "namespace": "dbt-test-namespace" + }, + "outputs": [], + "producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "run": { + "facets": { + "parent": { + "job": {"name": "dbt-job-name", "namespace": "dbt"}, + "run": {"runId": "f99310b4-3c3c-1a1a-2b2b-c1b95c24ff11"} + }, + "dbt_version": { + "version": "0.21.0" + } + }, + "runId": "b901441a-7b4a-4a97-aa61-a200106b3ce3" + } } ] diff --git a/integration/common/tests/dbt/test/target/manifest.json b/integration/common/tests/dbt/test/target/manifest.json index 38248d2d5b..6107541aa4 100644 --- a/integration/common/tests/dbt/test/target/manifest.json +++ b/integration/common/tests/dbt/test/target/manifest.json @@ -484,6 +484,96 @@ "relation_name": "`random-gcp-project`.`dbt_test1_dbt_test__audit`.`not_null_test_first_dbt_model_id`", "column_name": "id" }, + "test.dbt_bigquery_test.source_not_null_dbt_test2_source_table_id.a44e73432a": { + "raw_sql": "{{ test_not_null(**_dbt_schema_test_kwargs) }}", + "test_metadata": { + "name": "not_null", + "kwargs": { + "column_name": "id", + "model": "{% if config.get('where') %}(select * from {{ source('dbt_test2', 'source_table') }} where {{config.get('where')}}) test_dbt_test2_source_table {% else %}{{ source('dbt_test2', 'source_table') }}{% endif %}" + }, + "namespace": null + }, + "compiled": true, + "resource_type": "test", + "depends_on": { + "macros": [ + "macro.dbt.test_not_null", + "macro.dbt.should_store_failures", + "macro.dbt.statement" + ], + "nodes": [ + "source.dbt_bigquery_test.dbt_test2.source_table" + ] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "random-gcp-project", + "schema": "dbt_test1_dbt_test__audit", + "fqn": [ + "dbt_bigquery_test", + "schema_test", + "source_not_null_dbt_test2_source_table_id" + ], + "unique_id": "test.dbt_bigquery_test.source_not_null_dbt_test2_source_table_id.a44e73432a", + "package_name": "dbt_bigquery_test", + "root_path": "/home/dbt/code/dbt-test", + "path": "schema_test/source_not_null_dbt_test2_source_table_id.sql", + "original_file_path": "models/example/schema.yml", + "name": "source_not_null_dbt_test2_source_table_id", + "alias": "source_not_null_dbt_test2_source_table_id", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": [ + "schema" + ], + "refs": [], + "sources": [ + [ + "dbt_test2", + "source_table" + ] + ], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": "target/compiled/dbt_bigquery_test/models/example/schema.yml/schema_test/source_not_null_dbt_test2_source_table_id.sql", + "build_path": "target/run/dbt_bigquery_test/models/example/schema.yml/schema_test/source_not_null_dbt_test2_source_table_id.sql", + "deferred": false, + "unrendered_config": {}, + "created_at": 1629731179, + "compiled_sql": "\n \n \n\nselect *\nfrom `random-gcp-project`.`dbt_test2`.`source_table`\nwhere id is null\n\n\n", + "extra_ctes_injected": true, + "extra_ctes": [], + "relation_name": 
"`random-gcp-project`.`dbt_test1_dbt_test__audit`.`source_not_null_dbt_test2_source_table_id`", + "column_name": "id" + }, "test.dbt_bigquery_test.dbt_expectations_expect_column_values_to_not_be_null_test_first_dbt_model_id.2da7e7b96e": { "raw_sql": "{{ dbt_expectations.test_expect_column_values_to_not_be_null(**_dbt_schema_test_kwargs) }}{{ config(alias=\"dbt_expectations_expect_column_177e5e4ed27ab51efa28f8ff7a38e5fb\") }}", "test_metadata": { @@ -1526,6 +1616,52 @@ "unrendered_config": {}, "relation_name": "`random-gcp-project`.`dbt_test1`.`source_table`", "created_at": 1629731179 + }, + "source.dbt_bigquery_test.dbt_test2.source_table": { + "fqn": [ + "dbt_bigquery_test", + "example", + "dbt_test2", + "source_table" + ], + "database": "random-gcp-project", + "schema": "dbt_test2", + "unique_id": "source.dbt_bigquery_test.dbt_test2.source_table", + "package_name": "dbt_bigquery_test", + "root_path": "/home/dbt/code/dbt-test", + "path": "models/example/schema.yml", + "original_file_path": "models/example/schema.yml", + "name": "source_table", + "source_name": "dbt_test2", + "source_description": "", + "loader": "", + "identifier": "source_table", + "resource_type": "source", + "quoting": { + "database": null, + "schema": null, + "identifier": null, + "column": null + }, + "loaded_at_field": null, + "freshness": { + "warn_after": null, + "error_after": null, + "filter": null + }, + "external": null, + "description": "", + "columns": {}, + "meta": {}, + "source_meta": {}, + "tags": [], + "config": { + "enabled": true + }, + "patch_path": null, + "unrendered_config": {}, + "relation_name": "`random-gcp-project`.`dbt_test2`.`source_table`", + "created_at": 1629731179 } }, "macros": {}, @@ -1594,7 +1730,11 @@ "test.dbt_bigquery_test.not_null_test_third_dbt_model_second_id.808ed9e604": [ "model.dbt_bigquery_test.test_third_dbt_model" ], - "source.dbt_bigquery_test.dbt_test1.source_table": [] + "test.dbt_bigquery_test.source_not_null_dbt_test2_source_table_id.a44e73432a": [ + "source.dbt_bigquery_test.dbt_test2.source_table" + ], + "source.dbt_bigquery_test.dbt_test1.source_table": [], + "source.dbt_bigquery_test.dbt_test2.source_table": [] }, "child_map": { "model.dbt_bigquery_test.test_third_dbt_model": [ @@ -1634,8 +1774,12 @@ "test.dbt_bigquery_test.not_null_test_third_dbt_model_id.ee37a1e1fb": [], "test.dbt_bigquery_test.unique_test_third_dbt_model_second_id.f3c38f3b89": [], "test.dbt_bigquery_test.not_null_test_third_dbt_model_second_id.808ed9e604": [], + "test.dbt_bigquery_test.source_not_null_dbt_test2_source_table_id.a44e73432a": [], "source.dbt_bigquery_test.dbt_test1.source_table": [ "model.dbt_bigquery_test.test_second_parallel_dbt_model" + ], + "source.dbt_bigquery_test.dbt_test2.source_table": [ + "test.dbt_bigquery_test.source_not_null_dbt_test2_source_table_id.a44e73432a" ] } } diff --git a/integration/common/tests/dbt/test/target/run_results.json b/integration/common/tests/dbt/test/target/run_results.json index 8e2c375c22..5195763815 100644 --- a/integration/common/tests/dbt/test/target/run_results.json +++ b/integration/common/tests/dbt/test/target/run_results.json @@ -279,6 +279,27 @@ "message": null, "failures": 0, "unique_id": "test.dbt_bigquery_test.unique_test_third_dbt_model_second_id.f3c38f3b89" + }, + { + "status": "pass", + "timing": [ + { + "name": "compile", + "started_at": "2021-08-23T15:06:24.008644Z", + "completed_at": "2021-08-23T15:06:24.031682Z" + }, + { + "name": "execute", + "started_at": "2021-08-23T15:06:24.034392Z", + "completed_at": 
"2021-08-23T15:06:27.418481Z" + } + ], + "thread_id": "Thread-1", + "execution_time": 3.411060333251953, + "adapter_response": {}, + "message": null, + "failures": 0, + "unique_id": "test.dbt_bigquery_test.source_not_null_dbt_test2_source_table_id.a44e73432a" } ], "elapsed_time": 25.34427523612976, diff --git a/integration/common/tests/dbt/test_dbt_local.py b/integration/common/tests/dbt/test_dbt_local.py index 4da6eb8637..53b9be9011 100644 --- a/integration/common/tests/dbt/test_dbt_local.py +++ b/integration/common/tests/dbt/test_dbt_local.py @@ -79,6 +79,7 @@ def test_dbt_parse_dbt_test_event(mock_datetime, mock_uuid, parent_run_metadata) "1a69c0a7-04bb-408b-980e-cbbfb1831ef7", "f99310b4-339a-4381-ad3e-c1b95c24ff11", "c11f2efd-4415-45fc-8081-10d2aaa594d2", + "b901441a-7b4a-4a97-aa61-a200106b3ce3", ] processor = DbtLocalArtifactProcessor( From f851d773eed5b1b0ba04cab1c3ffcea618a250be Mon Sep 17 00:00:00 2001 From: Jakub Dardzinski Date: Sun, 3 Nov 2024 17:07:21 +0100 Subject: [PATCH 40/89] Fix edit this page link. (#3211) Signed-off-by: Jakub Dardzinski --- .circleci/config.yml | 2 +- dev/filter_matrix.py | 3 +++ website/docusaurus.config.js | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0a59f3b5b5..e1cd877a7a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -154,7 +154,7 @@ jobs: pip install pyyaml==6.0.1 python dev/filter_approvals.py - run: | - export IS_FULL_TESTS=$(gh pr view --json labels | jq 'any(.currentBranch.labels[]; .name == "full-tests")') + export IS_FULL_TESTS=$(gh pr view --json labels | jq 'any(.labels[]; .name == "full-tests")') echo $IS_FULL_TESTS if [ -z "$IS_FULL_TESTS" ] || [ "$IS_FULL_TESTS" == "0" ]; then pip install pyyaml==6.0.1 diff --git a/dev/filter_matrix.py b/dev/filter_matrix.py index 2183a75b4a..9ffa313282 100644 --- a/dev/filter_matrix.py +++ b/dev/filter_matrix.py @@ -11,6 +11,9 @@ if not jobs: continue + test_job = None + integration_test_job = None + for job in jobs: if "test-integration-spark" in job: test_job = job["test-integration-spark"] diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index a2c46db229..f97b4c304c 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -52,7 +52,7 @@ const config = { sidebarPath: require.resolve('./sidebars.js'), exclude: ['**/partials/**'], editUrl: - 'https://github.com/OpenLineage/docs/tree/main/', + 'https://github.com/OpenLineage/OpenLineage/tree/main/website/', }, theme: { customCss: require.resolve('./src/css/custom.css'), From 38c9e7b5fea6c931397808baa815029cb76b9a6b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 19:27:08 +0100 Subject: [PATCH 41/89] build(deps): bump the client-java group in /client/java with 5 updates (#3214) Bumps the client-java group in /client/java with 5 updates: | Package | From | To | | --- | --- | --- | | org.apache.kafka:kafka-clients | `3.8.0` | `3.8.1` | | [com.gradleup.shadow](https://github.com/GradleUp/shadow) | `8.3.3` | `8.3.5` | | [com.google.cloud:google-cloud-datalineage](https://github.com/googleapis/google-cloud-java) | `0.44.0` | `0.45.0` | | [com.google.cloud:google-cloud-nio](https://github.com/googleapis/java-storage-nio) | `0.127.25` | `0.127.26` | | software.amazon.awssdk:bom | `2.29.1` | `2.29.6` | Updates `org.apache.kafka:kafka-clients` from 3.8.0 to 3.8.1 Updates `com.gradleup.shadow` from 8.3.3 to 8.3.5 - [Release 
notes](https://github.com/GradleUp/shadow/releases) - [Commits](https://github.com/GradleUp/shadow/compare/8.3.3...8.3.5) Updates `com.google.cloud:google-cloud-datalineage` from 0.44.0 to 0.45.0 - [Release notes](https://github.com/googleapis/google-cloud-java/releases) - [Changelog](https://github.com/googleapis/google-cloud-java/blob/main/CHANGELOG.md) - [Commits](https://github.com/googleapis/google-cloud-java/compare/v0.44.0...v0.45.0) Updates `com.google.cloud:google-cloud-nio` from 0.127.25 to 0.127.26 - [Release notes](https://github.com/googleapis/java-storage-nio/releases) - [Changelog](https://github.com/googleapis/java-storage-nio/blob/main/CHANGELOG.md) - [Commits](https://github.com/googleapis/java-storage-nio/compare/v0.127.25...v0.127.26) Updates `software.amazon.awssdk:bom` from 2.29.1 to 2.29.6 --- updated-dependencies: - dependency-name: org.apache.kafka:kafka-clients dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: com.gradleup.shadow dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: com.google.cloud:google-cloud-datalineage dependency-type: direct:production update-type: version-update:semver-minor dependency-group: client-java - dependency-name: com.google.cloud:google-cloud-nio dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java - dependency-name: software.amazon.awssdk:bom dependency-type: direct:production update-type: version-update:semver-patch dependency-group: client-java ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- client/java/build.gradle | 4 ++-- client/java/transports-gcplineage/build.gradle | 4 ++-- client/java/transports-gcs/build.gradle | 4 ++-- client/java/transports-s3/build.gradle | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/client/java/build.gradle b/client/java/build.gradle index 5e1b0c16d4..676234b6b6 100644 --- a/client/java/build.gradle +++ b/client/java/build.gradle @@ -16,7 +16,7 @@ plugins { // Don't bump above 6.13 - it requires Java 11 https://github.com/diffplug/spotless/blob/main/plugin-gradle/CHANGES.md#changes-12 id 'com.diffplug.spotless' version '6.13.0' id "org.openapi.generator" version "6.6.0" - id "com.gradleup.shadow" version "8.3.3" + id "com.gradleup.shadow" version "8.3.5" id "pmd" id "io.freefair.lombok" version "8.10.2" id 'io.github.gradle-nexus.publish-plugin' version '2.0.0' @@ -73,7 +73,7 @@ dependencies { implementation "io.micrometer:micrometer-core:${micrometerVersion}" compileOnly 'com.google.code.findbugs:jsr305:3.0.2' - compileOnly 'org.apache.kafka:kafka-clients:3.8.0' + compileOnly 'org.apache.kafka:kafka-clients:3.8.1' compileOnly 'com.amazonaws:amazon-kinesis-producer:0.15.12' compileOnly "org.projectlombok:lombok:${lombokVersion}" compileOnly "io.micrometer:micrometer-registry-statsd:${micrometerVersion}" diff --git a/client/java/transports-gcplineage/build.gradle b/client/java/transports-gcplineage/build.gradle index 3c975e0778..af136b9706 100644 --- a/client/java/transports-gcplineage/build.gradle +++ b/client/java/transports-gcplineage/build.gradle @@ -13,7 +13,7 @@ plugins { id "com.adarshr.test-logger" version "3.2.0" // Don't bump above 6.13 - it requires Java 11 https://github.com/diffplug/spotless/blob/main/plugin-gradle/CHANGES.md#changes-12 id 'com.diffplug.spotless' version '6.13.0' - id 
"com.gradleup.shadow" version "8.3.3" + id "com.gradleup.shadow" version "8.3.5" id "pmd" id "io.freefair.lombok" version "8.10.2" } @@ -24,7 +24,7 @@ ext { dependencies { implementation("com.google.cloud.datalineage:producerclient-java8:1.0.0") - implementation("com.google.cloud:google-cloud-datalineage:0.44.0") + implementation("com.google.cloud:google-cloud-datalineage:0.45.0") } apply from: '../transports.build.gradle' diff --git a/client/java/transports-gcs/build.gradle b/client/java/transports-gcs/build.gradle index 5306bd0155..cce10d2c45 100644 --- a/client/java/transports-gcs/build.gradle +++ b/client/java/transports-gcs/build.gradle @@ -13,7 +13,7 @@ plugins { id "com.adarshr.test-logger" version "3.2.0" // Don't bump above 6.13 - it requires Java 11 https://github.com/diffplug/spotless/blob/main/plugin-gradle/CHANGES.md#changes-12 id 'com.diffplug.spotless' version '6.13.0' - id "com.gradleup.shadow" version "8.3.3" + id "com.gradleup.shadow" version "8.3.5" id "pmd" id "io.freefair.lombok" version "8.10.2" } @@ -25,7 +25,7 @@ ext { dependencies { implementation('com.google.cloud:google-cloud-storage:2.44.1') - testImplementation('com.google.cloud:google-cloud-nio:0.127.25') + testImplementation('com.google.cloud:google-cloud-nio:0.127.26') } shadowJar { diff --git a/client/java/transports-s3/build.gradle b/client/java/transports-s3/build.gradle index 45a3d933d3..6fd04c07c4 100644 --- a/client/java/transports-s3/build.gradle +++ b/client/java/transports-s3/build.gradle @@ -13,7 +13,7 @@ plugins { id "com.adarshr.test-logger" version "3.2.0" // Don't bump above 6.13 - it requires Java 11 https://github.com/diffplug/spotless/blob/main/plugin-gradle/CHANGES.md#changes-12 id 'com.diffplug.spotless' version '6.13.0' - id "com.gradleup.shadow" version "8.3.3" + id "com.gradleup.shadow" version "8.3.5" id "pmd" id "io.freefair.lombok" version "8.10.2" id "com.github.gmazzo.buildconfig" version "5.5.0" @@ -37,7 +37,7 @@ sourceSets { dependencies { compileOnly("com.google.code.findbugs:jsr305:3.0.2") - implementation(platform("software.amazon.awssdk:bom:2.29.1")) + implementation(platform("software.amazon.awssdk:bom:2.29.6")) implementation("software.amazon.awssdk:auth") implementation("software.amazon.awssdk:s3") implementation("software.amazon.awssdk:url-connection-client") From 7fd8762c5e637cff7f03727830e1ffe6394d0a17 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 3 Nov 2024 19:27:25 +0100 Subject: [PATCH 42/89] build(deps): bump the integration-spark group (#3212) Bumps the integration-spark group in /integration/spark with 3 updates: software.amazon.awssdk:bom, org.apache.kafka:kafka-clients and [org.apache.httpcomponents.client5:httpclient5](https://github.com/apache/httpcomponents-client). 
Updates `software.amazon.awssdk:bom` from 2.29.1 to 2.29.6 Updates `org.apache.kafka:kafka-clients` from 3.8.0 to 3.8.1 Updates `org.apache.httpcomponents.client5:httpclient5` from 5.4 to 5.4.1 - [Changelog](https://github.com/apache/httpcomponents-client/blob/rel/v5.4.1/RELEASE_NOTES.txt) - [Commits](https://github.com/apache/httpcomponents-client/compare/rel/v5.4...rel/v5.4.1) --- updated-dependencies: - dependency-name: software.amazon.awssdk:bom dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.apache.kafka:kafka-clients dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark - dependency-name: org.apache.httpcomponents.client5:httpclient5 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: integration-spark ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- integration/spark/app/build.gradle | 4 ++-- integration/spark/shared/build.gradle | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/integration/spark/app/build.gradle b/integration/spark/app/build.gradle index acb832740e..7018b3529d 100644 --- a/integration/spark/app/build.gradle +++ b/integration/spark/app/build.gradle @@ -90,7 +90,7 @@ dependencies { implementation(project(path: ":spark34", configuration: activeRuntimeElementsConfiguration)) implementation(project(path: ":spark35", configuration: activeRuntimeElementsConfiguration)) implementation(project(path: ":spark40", configuration: "scala213RuntimeElements")) - implementation("org.apache.httpcomponents.client5:httpclient5:5.4") + implementation("org.apache.httpcomponents.client5:httpclient5:5.4.1") compileOnly("org.apache.spark:spark-core_${scala}:${spark}") compileOnly("org.apache.spark:spark-sql_${scala}:${spark}") @@ -124,7 +124,7 @@ dependencies { exclude group: 'com.fasterxml.jackson.module' } - testImplementation(platform("software.amazon.awssdk:bom:2.29.1")) + testImplementation(platform("software.amazon.awssdk:bom:2.29.6")) testImplementation("software.amazon.awssdk:auth") testImplementation("software.amazon.awssdk:emr") testImplementation("software.amazon.awssdk:s3") diff --git a/integration/spark/shared/build.gradle b/integration/spark/shared/build.gradle index 3d3a1094de..63688f6b15 100644 --- a/integration/spark/shared/build.gradle +++ b/integration/spark/shared/build.gradle @@ -30,14 +30,14 @@ ext { bigqueryVersion = "0.41.0" databricksVersion = "0.1.4" junit5Version = "5.11.3" - kafkaClientsVersion = "3.8.0" + kafkaClientsVersion = "3.8.1" micrometerVersion = '1.13.6' mockitoVersion = "4.11.0" mockserverVersion = "5.14.0" postgresqlVersion = "42.7.4" sqlLiteVersion = "3.47.0.0" testcontainersVersion = "1.19.3" - awsSdkVersion = '2.29.1' + awsSdkVersion = '2.29.6' sparkVersion = project.findProperty("shared.spark.version") sparkSeries = sparkVersion.substring(0, 3) From 45e99cb71fe895a242b8bb3a8b3010eed0720ca2 Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Mon, 4 Nov 2024 10:22:17 +0100 Subject: [PATCH 43/89] fix: Fix docusaurus-mdx-checker errors (#3217) Signed-off-by: Artur Owczarek --- website/blog/column-lineage/index.mdx | 50 ++++++------ .../client/java/partials/java_transport.md | 3 +- website/docs/integrations/airflow/airflow.md | 2 +- .../airflow/default-extractors.md | 2 +- .../airflow/extractors/custom-extractors.md | 2 +- .../airflow/extractors/extractor-testing.md | 2 +- 
.../integrations/airflow/job-hierarchy.md | 2 +- website/docs/integrations/airflow/manual.md | 2 +- website/docs/integrations/airflow/older.md | 6 +- website/docs/integrations/airflow/usage.md | 4 +- website/docs/integrations/flink.md | 10 +-- .../spark/configuration/spark_conf.md | 2 +- website/docs/spec/naming.md | 79 ++++++++++--------- 13 files changed, 88 insertions(+), 78 deletions(-) diff --git a/website/blog/column-lineage/index.mdx b/website/blog/column-lineage/index.mdx index 6a42880d7b..390530911a 100644 --- a/website/blog/column-lineage/index.mdx +++ b/website/blog/column-lineage/index.mdx @@ -27,31 +27,33 @@ In the process of implementing column-level lineage, Paweł and Julien contribut An example of a `columnLineage` facet in the outputs array of a lineage event: - { - "namespace": "{namespace of the outputdataset}", - "name": "{name of the output dataset}", - "facets": { - "schema": { - "fields": [ - { "name": "{first column of the output dataset}", "type": "{its type}"}, - { "name": "{second column of the output dataset}", "type": "{its type}"}, - ... - ] - }, - "columnLineage": { - "{first column of the output dataset}": { - "inputFields": [ - { "namespace": "{input dataset namespace}", name: "{input dataset name}", "field": "{input dataset column name}"}, - ... other inputs - ], - "transformationDescription": "identical", - "transformationType": "IDENTITY" - }, - "{second column of the output dataset}": ..., - ... - } - } +```json +{ + "namespace": "{namespace of the outputdataset}", + "name": "{name of the output dataset}", + "facets": { + "schema": { + "fields": [ + { "name": "{first column of the output dataset}", "type": "{its type}"}, + { "name": "{second column of the output dataset}", "type": "{its type}"}, + ... + ] + }, + "columnLineage": { + "{first column of the output dataset}": { + "inputFields": [ + { "namespace": "{input dataset namespace}", name: "{input dataset name}", "field": "{input dataset column name}"}, + ... other inputs + ], + "transformationDescription": "identical", + "transformationType": "IDENTITY" + }, + "{second column of the output dataset}": ..., + ... } + } +} +``` ### How it works diff --git a/website/docs/client/java/partials/java_transport.md b/website/docs/client/java/partials/java_transport.md index 2c3a26479d..3e0c5a8f99 100644 --- a/website/docs/client/java/partials/java_transport.md +++ b/website/docs/client/java/partials/java_transport.md @@ -101,7 +101,8 @@ spark.openlineage.transport.headers.X-Some-Extra-Header=abc spark.openlineage.transport.compression=gzip ``` -

URL parsing within Spark integration +
+URL parsing within Spark integration

You can supply http parameters using values in url, the parsed `spark.openlineage.*` properties are located in url as follows: diff --git a/website/docs/integrations/airflow/airflow.md b/website/docs/integrations/airflow/airflow.md index 254bc79b10..a7bb2c4d55 100644 --- a/website/docs/integrations/airflow/airflow.md +++ b/website/docs/integrations/airflow/airflow.md @@ -4,7 +4,7 @@ title: Apache Airflow --- :::caution -This page is about Airflow's external integration that works mainly for Airflow versions <2.7. +This page is about Airflow's external integration that works mainly for Airflow versions \<2.7. [If you're using Airflow 2.7+, look at native Airflow OpenLineage provider documentation.](https://airflow.apache.org/docs/apache-airflow-providers-openlineage/stable/index.html)

The ongoing development and enhancements will be focused on the `apache-airflow-providers-openlineage` package, diff --git a/website/docs/integrations/airflow/default-extractors.md b/website/docs/integrations/airflow/default-extractors.md index bbdcced73f..8cb012b2f2 100644 --- a/website/docs/integrations/airflow/default-extractors.md +++ b/website/docs/integrations/airflow/default-extractors.md @@ -4,7 +4,7 @@ title: Exposing Lineage in Airflow Operators --- :::caution -This page is about Airflow's external integration that works mainly for Airflow versions <2.7. +This page is about Airflow's external integration that works mainly for Airflow versions \<2.7. [If you're using Airflow 2.7+, look at native Airflow OpenLineage provider documentation.](https://airflow.apache.org/docs/apache-airflow-providers-openlineage/stable/index.html)

The ongoing development and enhancements will be focused on the `apache-airflow-providers-openlineage` package, diff --git a/website/docs/integrations/airflow/extractors/custom-extractors.md b/website/docs/integrations/airflow/extractors/custom-extractors.md index 8316cb9067..9bb1280ed6 100644 --- a/website/docs/integrations/airflow/extractors/custom-extractors.md +++ b/website/docs/integrations/airflow/extractors/custom-extractors.md @@ -4,7 +4,7 @@ title: Custom Extractors --- :::caution -This page is about Airflow's external integration that works mainly for Airflow versions <2.7. +This page is about Airflow's external integration that works mainly for Airflow versions \<2.7. [If you're using Airflow 2.7+, look at native Airflow OpenLineage provider documentation.](https://airflow.apache.org/docs/apache-airflow-providers-openlineage/stable/index.html)

The ongoing development and enhancements will be focused on the `apache-airflow-providers-openlineage` package, diff --git a/website/docs/integrations/airflow/extractors/extractor-testing.md b/website/docs/integrations/airflow/extractors/extractor-testing.md index 1e5d7d806f..0a891cfacd 100644 --- a/website/docs/integrations/airflow/extractors/extractor-testing.md +++ b/website/docs/integrations/airflow/extractors/extractor-testing.md @@ -4,7 +4,7 @@ title: Testing Custom Extractors --- :::caution -This page is about Airflow's external integration that works mainly for Airflow versions <2.7. +This page is about Airflow's external integration that works mainly for Airflow versions \<2.7. [If you're using Airflow 2.7+, look at native Airflow OpenLineage provider documentation.](https://airflow.apache.org/docs/apache-airflow-providers-openlineage/stable/index.html)

The ongoing development and enhancements will be focused on the `apache-airflow-providers-openlineage` package, diff --git a/website/docs/integrations/airflow/job-hierarchy.md b/website/docs/integrations/airflow/job-hierarchy.md index 90bcf28c4e..c8491aded2 100644 --- a/website/docs/integrations/airflow/job-hierarchy.md +++ b/website/docs/integrations/airflow/job-hierarchy.md @@ -4,7 +4,7 @@ title: Job Hierarchy --- :::caution -This page is about Airflow's external integration that works mainly for Airflow versions <2.7. +This page is about Airflow's external integration that works mainly for Airflow versions \<2.7. [If you're using Airflow 2.7+, look at native Airflow OpenLineage provider documentation.](https://airflow.apache.org/docs/apache-airflow-providers-openlineage/stable/index.html)

The ongoing development and enhancements will be focused on the `apache-airflow-providers-openlineage` package, diff --git a/website/docs/integrations/airflow/manual.md b/website/docs/integrations/airflow/manual.md index 19168858d5..b3a40d0c64 100644 --- a/website/docs/integrations/airflow/manual.md +++ b/website/docs/integrations/airflow/manual.md @@ -4,7 +4,7 @@ title: Manually Annotated Lineage --- :::caution -This page is about Airflow's external integration that works mainly for Airflow versions <2.7. +This page is about Airflow's external integration that works mainly for Airflow versions \<2.7. [If you're using Airflow 2.7+, look at native Airflow OpenLineage provider documentation.](https://airflow.apache.org/docs/apache-airflow-providers-openlineage/stable/index.html)

The ongoing development and enhancements will be focused on the `apache-airflow-providers-openlineage` package, diff --git a/website/docs/integrations/airflow/older.md b/website/docs/integrations/airflow/older.md index 3c2a6465ac..2054a5b1ae 100644 --- a/website/docs/integrations/airflow/older.md +++ b/website/docs/integrations/airflow/older.md @@ -4,7 +4,7 @@ title: Supported Airflow versions --- :::caution -This page is about Airflow's external integration that works mainly for Airflow versions <2.7. +This page is about Airflow's external integration that works mainly for Airflow versions \<2.7. [If you're using Airflow 2.7+, look at native Airflow OpenLineage provider documentation.](https://airflow.apache.org/docs/apache-airflow-providers-openlineage/stable/index.html)

The ongoing development and enhancements will be focused on the `apache-airflow-providers-openlineage` package, @@ -16,7 +16,7 @@ while the `openlineage-airflow` will primarily be updated for bug fixes. ##### Airflow 2.7+ This package **should not** be used starting with Airflow 2.7.0 and **can not** be used with Airflow 2.8+. -It was designed as Airflow's external integration that works mainly for Airflow versions <2.7. +It was designed as Airflow's external integration that works mainly for Airflow versions \<2.7. For Airflow 2.7+ use the native Airflow OpenLineage provider [package](https://airflow.apache.org/docs/apache-airflow-providers-openlineage) `apache-airflow-providers-openlineage`. @@ -44,6 +44,6 @@ openlineage.lineage_backend.OpenLineageBackend The OpenLineageBackend does not take into account manually configured inlets and outlets. -##### Airflow <2.1 +##### Airflow \<2.1 OpenLineage does not work with versions older than Airflow 2.1. diff --git a/website/docs/integrations/airflow/usage.md b/website/docs/integrations/airflow/usage.md index bdb5573848..4815b21347 100644 --- a/website/docs/integrations/airflow/usage.md +++ b/website/docs/integrations/airflow/usage.md @@ -4,7 +4,7 @@ title: Using the Airflow Integration --- :::caution -This page is about Airflow's external integration that works mainly for Airflow versions <2.7. +This page is about Airflow's external integration that works mainly for Airflow versions \<2.7. [If you're using Airflow 2.7+, look at native Airflow OpenLineage provider documentation.](https://airflow.apache.org/docs/apache-airflow-providers-openlineage/stable/index.html)

The ongoing development and enhancements will be focused on the `apache-airflow-providers-openlineage` package, @@ -14,7 +14,7 @@ while the `openlineage-airflow` will primarily be updated for bug fixes. See [al #### PREREQUISITES - [Python 3.8](https://www.python.org/downloads) -- [Airflow >= 2.1,<2.8](https://pypi.org/project/apache-airflow) +- [Airflow >= 2.1,\<2.8](https://pypi.org/project/apache-airflow) To use the OpenLineage Airflow integration, you'll need a running [Airflow instance](https://airflow.apache.org/docs/apache-airflow/stable/start.html). You'll also need an OpenLineage-compatible [backend](https://github.com/OpenLineage/OpenLineage#scope). diff --git a/website/docs/integrations/flink.md b/website/docs/integrations/flink.md index d252d9f5ff..871b94e953 100644 --- a/website/docs/integrations/flink.md +++ b/website/docs/integrations/flink.md @@ -118,11 +118,11 @@ and allows all the configuration features present there to be used. The configur The following parameters can be specified: -| Parameter | Definition | Example | -|------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------| -| openlineage.transport.type | The transport type used for event emit, default type is `console` | http | -| openlineage.facets.disabled | List of facets to disable, enclosed in `[]` (required from 0.21.x) and separated by `;`, default is `[spark_unknown;spark.logicalPlan;]` (currently must contain `;`) | \[some_facet1;some_facet1\] | -| openlineage.job.owners. | Specifies ownership of the job. Multiple entries with different types are allowed. Config key name and value are used to create job ownership type and name (available since 1.13). | openlineage.job.owners.team="Some Team" | +| Parameter | Definition | Example | +|-------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------| +| openlineage.transport.type | The transport type used for event emit, default type is `console` | http | +| openlineage.facets.disabled | List of facets to disable, enclosed in `[]` (required from 0.21.x) and separated by `;`, default is `[spark_unknown;spark.logicalPlan;]` (currently must contain `;`) | \[some_facet1;some_facet1\] | +| openlineage.job.owners.\ | Specifies ownership of the job. Multiple entries with different types are allowed. Config key name and value are used to create job ownership type and name (available since 1.13). | openlineage.job.owners.team="Some Team" | ## Transports diff --git a/website/docs/integrations/spark/configuration/spark_conf.md b/website/docs/integrations/spark/configuration/spark_conf.md index 58ab68e08a..fc57979886 100644 --- a/website/docs/integrations/spark/configuration/spark_conf.md +++ b/website/docs/integrations/spark/configuration/spark_conf.md @@ -22,5 +22,5 @@ The following parameters can be specified: | spark.openlineage.jobName.appendDatasetName | Decides whether output dataset name should be appended to job name. By default `true`. | false | | spark.openlineage.jobName.replaceDotWithUnderscore | Replaces dots in job name with underscore. Can be used to mimic legacy behaviour on Databricks platform. By default `false`. 
| false | | spark.openlineage.debugFacet | Determines whether debug facet shall be generated and included within the event. Set `enabled` to turn it on. By default, facet is disabled. | enabled | -| spark.openlineage.job.owners. | Specifies ownership of the job. Multiple entries with different types are allowed. Config key name and value are used to create job ownership type and name (available since 1.13). | spark.openlineage.job.owners.team="Some Team" | +| spark.openlineage.job.owners.\ | Specifies ownership of the job. Multiple entries with different types are allowed. Config key name and value are used to create job ownership type and name (available since 1.13). | spark.openlineage.job.owners.team="Some Team" | | spark.openlineage.columnLineage.datasetLineageEnabled | Makes the dataset dependencies to be included in their own property `dataset` in the column lineage pattern. If this flag is set to `false`, then the dataset dependencies are merged into `fields` property. The default value is `false`. **It is recommended to set it to `true`** | true | diff --git a/website/docs/spec/naming.md b/website/docs/spec/naming.md index ecd48fcef6..975495e76f 100644 --- a/website/docs/spec/naming.md +++ b/website/docs/spec/naming.md @@ -4,65 +4,72 @@ sidebar_position: 3 # Naming Conventions -Employing a unique naming strategy per resource ensures that the spec is followed uniformly regardless of metadata producer. +Employing a unique naming strategy per resource ensures that the spec is followed uniformly regardless of metadata +producer. -Jobs and Datasets have their own namespaces, job namespaces being derived from schedulers and dataset namespaces from datasources. +Jobs and Datasets have their own namespaces, job namespaces being derived from schedulers and dataset namespaces from +datasources. ## Dataset Naming A dataset, or `table`, is organized according to a producer, namespace, database and (optionally) schema. 
-| Data Store | Type | Namespace | Name | -|:------------------------------|:-------------------------------------|:-------------------------------------------------------------|:---------------------------------------------------------| -| Athena | Warehouse | awsathena://athena.{region_name}.amazonaws.com | {catalog}.{database}.{table} | -| AWS Glue | Data catalog | arn:aws:glue:{region}:{account id} | table/{database name}/{table name} | -| Azure Cosmos DB | Warehouse | azurecosmos://{host}/dbs/{database} | colls/{table} | -| Azure Data Explorer | Warehouse | azurekusto://{host}.kusto.windows.net | {database}/{table} | -| Azure Synapse | Warehouse | sqlserver://{host}:{port} | {schema}.{table} | -| BigQuery | Warehouse | bigquery:// | {project id}.{dataset name}.{table name} | -| Cassandra | Warehouse | cassandra://{host}:{port} | {keyspace}.{table} | -| MySQL | Warehouse | mysql://{host}:{port} | {database}.{table} | -| Oracle | Warehouse | oracle://{host}:{port} | {serviceName}.{schema}.{table} or {sid}.{schema}.{table} | -| Postgres | Warehouse | postgres://{host}:{port} | {database}.{schema}.{table} | -| Teradata | Warehouse | teradata://{host}:{port} | {database}.{table} | -| Redshift | Warehouse | redshift://{cluster_identifier}.{region_name}:{port} | {database}.{schema}.{table} | -| Snowflake | Warehouse | snowflake://{organization name}-{account name} | {database}.{schema}.{table} | -| Trino | Warehouse | trino://{host}:{port} | {catalog}.{schema}.{table} | -| ABFSS (Azure Data Lake Gen2) | Data lake | abfss://{container name}@{service name}.dfs.core.windows.net | {path} | -| DBFS (Databricks File System) | Distributed file system | dbfs://{workspace name} | {path} | -| GCS | Blob storage | gs://{bucket name} | {object key} | -| HDFS | Distributed file system | hdfs://{namenode host}:{namenode port} | {path} | -| Kafka | distributed event streaming platform | kafka://{bootstrap server host}:{port} | {topic} | -| Local file system | File system | file | {path} | -| Remote file system | File system | file://{host} | {path} | -| S3 | Blob Storage | s3://{bucket name} | {object key} | -| WASBS (Azure Blob Storage) | Blob Storage | wasbs://{container name}@{service name}.dfs.core.windows.net | {object key} | +| Data Store | Type | Namespace | Name | +|:------------------------------|:-------------------------------------|:---------------------------------------------------------------|:-----------------------------------------------------------| +| Athena | Warehouse | `awsathena://athena.{region_name}.amazonaws.com` | `{catalog}.{database}.{table}` | +| AWS Glue | Data catalog | `arn:aws:glue:{region}:{account id}` | `table/{database name}/{table name}` | +| Azure Cosmos DB | Warehouse | `azurecosmos://{host}/dbs/{database}` | `colls/{table}` | +| Azure Data Explorer | Warehouse | `azurekusto://{host}.kusto.windows.net` | `{database}/{table}` | +| Azure Synapse | Warehouse | `sqlserver://{host}:{port}` | `{schema}.{table}` | +| BigQuery | Warehouse | `bigquery://` | `{project id}.{dataset name}.{table name}` | +| Cassandra | Warehouse | `cassandra://{host}:{port}` | `{keyspace}.{table}` | +| MySQL | Warehouse | `mysql://{host}:{port}` | `{database}.{table}` | +| Oracle | Warehouse | `oracle://{host}:{port}` | `{serviceName}.{schema}.{table} or {sid}.{schema}.{table}` | +| Postgres | Warehouse | `postgres://{host}:{port}` | `{database}.{schema}.{table}` | +| Teradata | Warehouse | `teradata://{host}:{port}` | `{database}.{table}` | +| Redshift | Warehouse | 
`redshift://{cluster_identifier}.{region_name}:{port}` | `{database}.{schema}.{table}` | +| Snowflake | Warehouse | `snowflake://{organization name}-{account name}` | `{database}.{schema}.{table}` | +| Trino | Warehouse | `trino://{host}:{port}` | `{catalog}.{schema}.{table}` | +| ABFSS (Azure Data Lake Gen2) | Data lake | `abfss://{container name}@{service name}.dfs.core.windows.net` | `{path}` | +| DBFS (Databricks File System) | Distributed file system | `dbfs://{workspace name}` | `{path}` | +| GCS | Blob storage | `gs://{bucket name}` | `{object key}` | +| HDFS | Distributed file system | `hdfs://{namenode host}:{namenode port}` | `{path}` | +| Kafka | distributed event streaming platform | `kafka://{bootstrap server host}:{port}` | `{topic}` | +| Local file system | File system | `file` | `{path}` | +| Remote file system | File system | `file://{host}` | `{path}` | +| S3 | Blob Storage | `s3://{bucket name}` | `{object key}` | +| WASBS (Azure Blob Storage) | Blob Storage | `wasbs://{container name}@{service name}.dfs.core.windows.net` | `{object key}` | ## Job Naming -A `Job` is a recurring data transformation with inputs and outputs. Each execution is captured as a `Run` with corresponding metadata. +A `Job` is a recurring data transformation with inputs and outputs. Each execution is captured as a `Run` with +corresponding metadata. A `Run` event identifies the `Job` it instances by providing the job’s unique identifier. -The `Job` identifier is composed of a `Namespace` and `Name`. The job namespace is usually set in OpenLineage client config. The job name is unique within its namespace. +The `Job` identifier is composed of a `Namespace` and `Name`. The job namespace is usually set in OpenLineage client +config. The job name is unique within its namespace. -| Job type | Name | Example | -|:-------------|:----------------------------|:-----------------------------------------------------------| -| Airflow task | {dag_id}.{task_id} | orders_etl.count_orders | -| Spark job | {appName}.{command}.{table} | my_awesome_app.execute_insert_into_hive_table.mydb_mytable | -| SQL | {schema}.{table} | gx.validate_datasets | +| Job type | Name | Example | +|:-------------|:------------------------------|:-------------------------------------------------------------| +| Airflow task | `{dag_id}.{task_id}` | `orders_etl.count_orders` | +| Spark job | `{appName}.{command}.{table}` | `my_awesome_app.execute_insert_into_hive_table.mydb_mytable` | +| SQL | `{schema}.{table}` | `gx.validate_datasets` | ## Run Naming -Runs are named using client-generated UUIDs. The OpenLineage client is responsible for generating them and maintaining them throughout the duration of the runcycle. +Runs are named using client-generated UUIDs. The OpenLineage client is responsible for generating them and maintaining +them throughout the duration of the runcycle. ```python from openlineage.client.run import Run from openlineage.client.uuid import generate_new_uuid + run = Run(str(generate_new_uuid())) ``` ## Why Naming Matters -Naming enables focused insight into data flows, even when datasets and workflows are distributed across an organization. This focus enabled by naming is key to the production of useful lineage. +Naming enables focused insight into data flows, even when datasets and workflows are distributed across an organization. +This focus enabled by naming is key to the production of useful lineage. 
![image](./naming-correlations.svg) From d1c97acf401fd7b8919bf86464b3e27e41e287e2 Mon Sep 17 00:00:00 2001 From: Jakub Dardzinski Date: Mon, 4 Nov 2024 18:15:02 +0100 Subject: [PATCH 44/89] Add EnvironmentVariablesRunFacet to core spec. (#3186) Use EnvironmentVariablesRunFacet in Python client. Signed-off-by: Jakub Dardzinski --- .pre_commit/check-redactions.py | 1 + client/python/openlineage/client/client.py | 123 ++++++-- client/python/openlineage/client/facet_v2.py | 2 + client/python/openlineage/client/facets.py | 10 + client/python/openlineage/client/filter.py | 20 +- .../generated/environment_variables_run.py | 35 +++ .../openlineage/client/transport/transport.py | 3 + client/python/openlineage/client/utils.py | 19 ++ client/python/redact_fields.yml | 7 + client/python/tests/test_client.py | 283 +++++++++++++++++- client/python/tests/test_utils.py | 37 ++- spec/facets/EnvironmentVariablesRunFacet.json | 47 +++ website/docs/client/python.md | 26 ++ .../run-facets/environment_variables.md | 13 + .../facets/run-facets/processing_engine.md | 16 + .../1-0-0/EnvironmentVariablesRunFacet.json | 47 +++ 16 files changed, 650 insertions(+), 39 deletions(-) create mode 100644 client/python/openlineage/client/facets.py create mode 100644 client/python/openlineage/client/generated/environment_variables_run.py create mode 100644 spec/facets/EnvironmentVariablesRunFacet.json create mode 100644 website/docs/spec/facets/run-facets/environment_variables.md create mode 100644 website/docs/spec/facets/run-facets/processing_engine.md create mode 100644 website/static/spec/facets/1-0-0/EnvironmentVariablesRunFacet.json diff --git a/.pre_commit/check-redactions.py b/.pre_commit/check-redactions.py index 7af3e20653..130e7126c5 100755 --- a/.pre_commit/check-redactions.py +++ b/.pre_commit/check-redactions.py @@ -35,6 +35,7 @@ def get_class_names(code): redact_module = next((m for m in REDACT_FIELDS if m["module"] == module_name), None) if not redact_module: not_found.extend([MissingClass(module_name, clazz) for clazz in classes]) + continue for clazz in classes: # check if class is defined in yaml if not next((c for c in redact_module["classes"] if c["class_name"] == clazz), None): diff --git a/client/python/openlineage/client/client.py b/client/python/openlineage/client/client.py index e996f673c3..2fcadbba42 100644 --- a/client/python/openlineage/client/client.py +++ b/client/python/openlineage/client/client.py @@ -10,8 +10,9 @@ import attr import yaml -from openlineage.client.filter import Filter, create_filter +from openlineage.client.filter import Filter, FilterConfig, create_filter from openlineage.client.serde import Serde +from openlineage.client.utils import deep_merge_dicts if TYPE_CHECKING: from requests import Session @@ -20,8 +21,17 @@ import contextlib from openlineage.client import event_v2 +from openlineage.client.facets import FacetsConfig +from openlineage.client.generated.environment_variables_run import ( + EnvironmentVariable, + EnvironmentVariablesRunFacet, +) from openlineage.client.run import DatasetEvent, JobEvent, RunEvent -from openlineage.client.transport import Transport, TransportFactory, get_default_factory +from openlineage.client.transport import ( + Transport, + TransportFactory, + get_default_factory, +) from openlineage.client.transport.http import HttpConfig, HttpTransport, create_token_provider from openlineage.client.transport.noop import NoopConfig, NoopTransport @@ -38,6 +48,24 @@ class OpenLineageClientOptions: adapter: HTTPAdapter = attr.ib(default=None) +@attr.s 
+class OpenLineageConfig: + transport: dict[str, Any] | None = attr.ib(factory=dict) + facets: FacetsConfig = attr.ib(factory=FacetsConfig) + filters: list[FilterConfig] = attr.ib(factory=list) + + @classmethod + def from_dict(cls, params: dict[str, Any]) -> OpenLineageConfig: + config = cls() + if "transport" in params: + config.transport = params["transport"] + if "facets" in params: + config.facets = FacetsConfig(**params["facets"]) + if "filters" in params: + config.filters = [FilterConfig(**filter_config) for filter_config in params["filters"]] + return config + + log = logging.getLogger(__name__) _T = TypeVar("_T", bound="OpenLineageClient") @@ -53,6 +81,8 @@ def __init__( # noqa: PLR0913 session: Session | None = None, transport: Transport | None = None, factory: TransportFactory | None = None, + *, + config: dict[str, str] | None = None, ) -> None: # Set parent's logging level if environment variable is present custom_logging_level = os.getenv("OPENLINEAGE_CLIENT_LOGGING", None) @@ -68,7 +98,9 @@ def __init__( # noqa: PLR0913 # Make config ellipsis - as a guard value to not try to # reload yaml each time config is referred to. - self._config: dict[str, dict[str, str]] | None = None + self._config: OpenLineageConfig | None = None + + self.user_defined_config: dict[str, str] | None = config self._alias_env_vars() @@ -78,7 +110,7 @@ def __init__( # noqa: PLR0913 log.info("OpenLineageClient will use `%s` transport", self.transport.kind) self._filters: list[Filter] = [] - for conf in self.config.get("filters", []): + for conf in self.config.filters: _filter = create_filter(conf) if _filter: self._filters.append(_filter) @@ -94,7 +126,7 @@ def from_environment(cls: type[_T]) -> _T: @classmethod def from_dict(cls: type[_T], config: dict[str, str]) -> _T: - return cls(transport=get_default_factory().create(config=config)) + return cls(config=config) def filter_event( self, @@ -122,6 +154,8 @@ def emit(self, event: Event) -> None: log.debug("OpenLineage event has been filtered out and will not be emitted.") return + event = self.add_environment_facets(event) + if log.isEnabledFor(logging.DEBUG): val = Serde.to_json(event).encode("utf-8") log.debug("OpenLineageClient will emit event %s", val) @@ -129,17 +163,39 @@ def emit(self, event: Event) -> None: log.debug("OpenLineage event successfully emitted.") @property - def config(self) -> dict[str, Any]: - """Content of OpenLineage YAML config file.""" + def config(self) -> OpenLineageConfig: + """ + Retrieves the OpenLineage configuration. + + This property method returns the content of the OpenLineage YAML config file. + The configuration is determined by merging sources in the following order of precedence: + 1. User-defined configuration passed to the client constructor. + 2. YAML config file located in one of the following paths: + - Path specified by the `OPENLINEAGE_CONFIG` environment variable. + - Current working directory. + - `$HOME/.openlineage`. + 3. Environment variables with the `OPENLINEAGE__` prefix. + If the configuration is not already loaded, it will be constructed by merging the above sources. + In case of a TypeError during the parsing of the configuration, a ValueError will be raised indicating + that the structure of the config does not match the expected format. 
+ """ if self._config is None: - config_path = self._find_yaml_config_path() - if config_path: - self._config = self._get_config_file_content(config_path) - else: - self._config = {} + config_dict: dict[str, Any] = {} + if self.user_defined_config: + config_dict = self.user_defined_config + if config_path := self._find_yaml_config_path(): + config_dict = deep_merge_dicts(self._get_config_file_content(config_path), config_dict) + if config_from_env_vars := self._load_config_from_env_variables(): + config_dict = deep_merge_dicts(config_from_env_vars, config_dict) + try: + self._config = OpenLineageConfig.from_dict(config_dict) + except TypeError as e: + # raise exception that structure of the config does not match + msg = "Failed to parse OpenLineage config." + raise ValueError(msg) from e return self._config - def _resolve_transport(self, **kwargs: Any) -> Transport: # noqa: PLR0911 + def _resolve_transport(self, **kwargs: Any) -> Transport: """ Resolves the transport mechanism based on the provided arguments or environment settings. @@ -164,9 +220,9 @@ def _resolve_transport(self, **kwargs: Any) -> Transport: # noqa: PLR0911 return cast(Transport, kwargs["transport"]) # 3. Check if transport configuration is provided in YAML config file - if self.config.get("transport"): + if self.config.transport and self.config.transport.get("type"): factory = kwargs.get("factory") or get_default_factory() - return factory.create(self.config["transport"]) + return factory.create(self.config.transport) # 4. Check legacy HTTP transport initialization with url and options if kwargs.get("url"): @@ -174,16 +230,11 @@ def _resolve_transport(self, **kwargs: Any) -> Transport: # noqa: PLR0911 url=kwargs["url"], options=kwargs.get("options"), session=kwargs.get("session") ) - # 5. Check transport initialization with env variables - if config := self._load_config_from_env_variables(): - factory = kwargs.get("factory") or get_default_factory() - return factory.create(config["transport"]) - - # 6. Check HTTP transport initialization with env variables + # 5. Check HTTP transport initialization with env variables if os.environ.get("OPENLINEAGE_URL"): return self._http_transport_from_env_variables() - # 7. If all else fails, print events to console + # 6. If all else fails, print events to console from openlineage.client.transport.console import ConsoleConfig, ConsoleTransport log.warning("Couldn't find any OpenLineage transport configuration; will print events to console.") @@ -306,9 +357,6 @@ def _load_config_from_env_variables(cls) -> dict[str, Any] | None: cls._insert_into_config(config, keys, env_value) - if (transport_config := config.get("transport")) is None or transport_config.get("type") is None: - return None - return config @staticmethod @@ -323,3 +371,28 @@ def _insert_into_config(config: dict[str, Any], key_path: list[str], value: str) # Overwrite if key already exists current[keys[-1]] = value + + def add_environment_facets(self, event: Event) -> Event: + """ + Adds environment variables as facets to the event object. + """ + if isinstance(event, RunEvent) and (env_vars := self._collect_environment_variables()): + event.run.facets["environmentVariables"] = EnvironmentVariablesRunFacet( + environmentVariables=[ + EnvironmentVariable(name=name, value=value) for name, value in env_vars.items() + ] + ) + return event + + def _collect_environment_variables(self) -> dict[str, str]: + """ + Collects and returns a dictionary of relevant environment variables. 
+ """ + filtered_vars = {k: v for k, v in os.environ.items() if k in self.config.facets.environment_variables} + missing_vars = set(self.config.facets.environment_variables) - set(filtered_vars) + if missing_vars: + log.warning( + "The following environment variables are missing: %s when adding to OpenLineage event", + missing_vars, + ) + return filtered_vars diff --git a/client/python/openlineage/client/facet_v2.py b/client/python/openlineage/client/facet_v2.py index 3bf3f41886..177ddae1be 100644 --- a/client/python/openlineage/client/facet_v2.py +++ b/client/python/openlineage/client/facet_v2.py @@ -9,6 +9,7 @@ datasource_dataset, documentation_dataset, documentation_job, + environment_variables_run, error_message_run, external_query_run, extraction_error_run, @@ -54,6 +55,7 @@ "datasource_dataset", "documentation_dataset", "documentation_job", + "environment_variables_run", "error_message_run", "external_query_run", "extraction_error_run", diff --git a/client/python/openlineage/client/facets.py b/client/python/openlineage/client/facets.py new file mode 100644 index 0000000000..e12902945c --- /dev/null +++ b/client/python/openlineage/client/facets.py @@ -0,0 +1,10 @@ +# Copyright 2018-2024 contributors to the OpenLineage project +# SPDX-License-Identifier: Apache-2.0 +from __future__ import annotations + +import attr + + +@attr.s +class FacetsConfig: + environment_variables: list[str] = attr.ib(factory=list) diff --git a/client/python/openlineage/client/filter.py b/client/python/openlineage/client/filter.py index 88d77d63bc..e6fe0194cb 100644 --- a/client/python/openlineage/client/filter.py +++ b/client/python/openlineage/client/filter.py @@ -5,12 +5,20 @@ import re import typing +import attr from openlineage.client.event_v2 import RunEvent as RunEvent_v2 from openlineage.client.run import RunEvent RunEventType = typing.Union[RunEvent, RunEvent_v2] +@attr.s +class FilterConfig: + type: str | None = attr.ib(default=None) + match: str | None = attr.ib(default=None) + regex: str | None = attr.ib(default=None) + + class Filter: def filter_event(self, event: RunEventType) -> RunEventType | None: ... 
@@ -36,12 +44,12 @@ def filter_event(self, event: RunEventType) -> RunEventType | None: return event -def create_filter(conf: dict[str, str]) -> Filter | None: - if "type" not in conf: +def create_filter(conf: FilterConfig) -> Filter | None: + if not conf.type: return None # Switch in 3.10 🙂 - if conf["type"] == "exact": - return ExactMatchFilter(match=conf["match"]) - if conf["type"] == "regex": - return RegexFilter(regex=conf["regex"]) + if conf.type == "exact" and conf.match: + return ExactMatchFilter(match=conf.match) + if conf.type == "regex" and conf.regex: + return RegexFilter(regex=conf.regex) return None diff --git a/client/python/openlineage/client/generated/environment_variables_run.py b/client/python/openlineage/client/generated/environment_variables_run.py new file mode 100644 index 0000000000..6b7be2dc45 --- /dev/null +++ b/client/python/openlineage/client/generated/environment_variables_run.py @@ -0,0 +1,35 @@ +# Copyright 2018-2024 contributors to the OpenLineage project +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +from typing import ClassVar + +import attr +from openlineage.client.generated.base import RunFacet +from openlineage.client.utils import RedactMixin + + +@attr.define +class EnvironmentVariable(RedactMixin): + name: str + """The name of the environment variable.""" + + value: str + """The value of the environment variable.""" + + _skip_redact: ClassVar[list[str]] = ["name"] + + @staticmethod + def _get_schema() -> str: + return "https://openlineage.io/spec/facets/1-0-0/EnvironmentVariablesRunFacet.json#/$defs/EnvironmentVariable" + + +@attr.define +class EnvironmentVariablesRunFacet(RunFacet): + environmentVariables: list[EnvironmentVariable] # noqa: N815 + """The environment variables for the run.""" + + @staticmethod + def _get_schema() -> str: + return "https://openlineage.io/spec/facets/1-0-0/EnvironmentVariablesRunFacet.json#/$defs/EnvironmentVariablesRunFacet" diff --git a/client/python/openlineage/client/transport/transport.py b/client/python/openlineage/client/transport/transport.py index be6b455567..facb3963a0 100644 --- a/client/python/openlineage/client/transport/transport.py +++ b/client/python/openlineage/client/transport/transport.py @@ -20,6 +20,8 @@ class config. from typing import TYPE_CHECKING, Any, TypeVar +import attr + if TYPE_CHECKING: from openlineage.client.client import Event @@ -27,6 +29,7 @@ class config. _T = TypeVar("_T", bound="Config") +@attr.s class Config: @classmethod def from_dict(cls: type[_T], params: dict[str, Any]) -> _T: # noqa: ARG003 diff --git a/client/python/openlineage/client/utils.py b/client/python/openlineage/client/utils.py index 95211e762f..e4e152f414 100644 --- a/client/python/openlineage/client/utils.py +++ b/client/python/openlineage/client/utils.py @@ -36,6 +36,25 @@ def get_only_specified_fields(clazz: type[Any], params: dict[str, Any]) -> dict[ return {key: value for key, value in params.items() if key in field_keys} +def deep_merge_dicts(dict1: dict[Any, Any], dict2: dict[Any, Any]) -> dict[Any, Any]: + """Deep merges two dictionaries. + + This function merges two dictionaries while handling nested dictionaries. + For keys that exist in both dictionaries, the values from dict2 take precedence. + If a key exists in both dictionaries and the values are dictionaries themselves, + they are merged recursively. + This function merges only dictionaries. If key is of different type, e.g. list + it does not work properly. 
+ """ + merged = dict1.copy() + for k, v in dict2.items(): + if k in merged and isinstance(v, dict): + merged[k] = deep_merge_dicts(merged.get(k, {}), v) + else: + merged[k] = v + return merged + + class RedactMixin: _skip_redact: ClassVar[list[str]] = [] diff --git a/client/python/redact_fields.yml b/client/python/redact_fields.yml index dce0b30690..24b3ed940e 100644 --- a/client/python/redact_fields.yml +++ b/client/python/redact_fields.yml @@ -92,6 +92,13 @@ classes: - class_name: DocumentationJobFacet redact_fields: [] +- module: environment_variables_run + classes: + - class_name: EnvironmentVariable + redact_fields: + - name + - class_name: EnvironmentVariablesRunFacet + redact_fields: [] - module: error_message_run classes: - class_name: ErrorMessageRunFacet diff --git a/client/python/tests/test_client.py b/client/python/tests/test_client.py index 5d89bf6ffb..bbb76b7521 100644 --- a/client/python/tests/test_client.py +++ b/client/python/tests/test_client.py @@ -9,7 +9,12 @@ from unittest.mock import MagicMock, patch import pytest -from openlineage.client.client import OpenLineageClient, OpenLineageClientOptions +from openlineage.client.client import OpenLineageClient, OpenLineageClientOptions, OpenLineageConfig +from openlineage.client.facets import FacetsConfig +from openlineage.client.generated.environment_variables_run import ( + EnvironmentVariable, + EnvironmentVariablesRunFacet, +) from openlineage.client.run import ( SCHEMA_URL, Dataset, @@ -351,22 +356,137 @@ def test_find_yaml_config_path_checks_all_paths(mock_is_file, mocker: MockerFixt assert result is None +def test_ol_config_from_dict(): + # Test with complete config + config_dict = { + "transport": {"url": "http://localhost:5050"}, + "facets": {"environment_variables": ["VAR1", "VAR2"]}, + "filters": [{"type": "exact", "match": "job_name"}], + } + config = OpenLineageConfig.from_dict(config_dict) + assert config.transport["url"] == "http://localhost:5050" + assert config.facets.environment_variables == ["VAR1", "VAR2"] + assert config.filters[0].type == "exact" + assert config.filters[0].match == "job_name" + + # Test with missing keys + config_dict = {} + config = OpenLineageConfig.from_dict(config_dict) + assert config.transport == {} + assert config.facets == FacetsConfig() + assert config.filters == [] + + # Test with invalid data type + with pytest.raises(TypeError): + OpenLineageConfig.from_dict({"facets": "invalid_data"}) + + @patch("yaml.safe_load", return_value=None) def test_config_file_content_empty_file(mock_yaml) -> None: # noqa: ARG001 - assert OpenLineageClient().config == {} + assert OpenLineageClient().config == OpenLineageConfig() def test_config(mocker: MockerFixture, root: Path) -> None: mocker.patch.dict(os.environ, {"OPENLINEAGE_CONFIG": str(root / "config" / "http.yml")}) - assert OpenLineageClient().config == { + assert OpenLineageClient().config == OpenLineageConfig.from_dict( + { + "transport": { + "auth": {"apiKey": "random_token", "type": "api_key"}, + "compression": "gzip", + "endpoint": "api/v1/lineage", + "type": "http", + "url": "http://localhost:5050", + } + } + ) + + +def test_openlineage_client_from_dict() -> None: + config_dict = {"transport": {"type": "http", "url": "http://localhost:5050"}} + client = OpenLineageClient.from_dict(config_dict) + assert client.transport.url == "http://localhost:5050" + + +def test_openlineage_client_from_empty_dict() -> None: + client = OpenLineageClient.from_dict({}) + assert isinstance(client.transport, ConsoleTransport) + + +def 
test_openlineage_config_from_dict() -> None: + config_dict = { "transport": { - "auth": {"apiKey": "random_token", "type": "api_key"}, - "compression": "gzip", - "endpoint": "api/v1/lineage", "type": "http", "url": "http://localhost:5050", - } + "auth": {"api_key": "random_token"}, + }, + "facets": { + "environment_variables": ["VAR1", "VAR2"], + }, + "filters": [ + {"type": "regex", "match": ".*"}, + ], } + config = OpenLineageConfig.from_dict(config_dict) + + assert config.transport == config_dict["transport"] + assert config.facets.environment_variables == config_dict["facets"]["environment_variables"] + assert len(config.filters) == 1 + assert config.filters[0].type == "regex" + assert config.filters[0].match == ".*" + + +def test_openlineage_config_default_values() -> None: + config = OpenLineageConfig() + + assert config.transport == {} + assert isinstance(config.facets, FacetsConfig) + assert config.filters == [] + + +@patch.dict(os.environ, {"ENV_VAR_1": "value1", "ENV_VAR_2": "value2"}) +def test_collect_environment_variables(): + client = OpenLineageClient() + client._config = OpenLineageConfig( # noqa: SLF001 + facets=FacetsConfig(environment_variables=["ENV_VAR_1", "ENV_VAR_2", "MISSING_VAR"]) + ) + env_vars = client._collect_environment_variables() # noqa: SLF001 + assert env_vars == {"ENV_VAR_1": "value1", "ENV_VAR_2": "value2"} + + +@patch.dict(os.environ, {"ENV_VAR_1": "value1", "SENSITIVE_VAR": "PII"}) +def test_add_environment_facets(): + client = OpenLineageClient() + client._config = OpenLineageConfig( # noqa: SLF001 + facets=FacetsConfig(environment_variables=["ENV_VAR_1"]) + ) + run = Run(runId=str(generate_new_uuid())) + event = RunEvent( + eventType=RunState.START, + eventTime="2021-11-03T10:53:52.427343", + run=run, + job=Job(name="name", namespace=""), + producer="", + schemaURL="", + ) + event.run.facets = {} + + modified_event = client.add_environment_facets(event) + + assert "environmentVariables" in modified_event.run.facets + assert modified_event.run.facets["environmentVariables"] == EnvironmentVariablesRunFacet( + [EnvironmentVariable(name="ENV_VAR_1", value="value1")] + ) + + +@patch("openlineage.client.client.OpenLineageClient._find_yaml_config_path") +@patch("openlineage.client.client.OpenLineageClient._get_config_file_content") +def test_config_property_loads_yaml(mock_get_config_content, mock_find_yaml): + mock_find_yaml.return_value = "config.yml" + mock_get_config_content.return_value = {"transport": {"type": "http", "url": "http://localhost:5050"}} + + config = OpenLineageClient().config + assert config.transport["type"] == "http" + assert config.transport["url"] == "http://localhost:5050" @patch.dict( @@ -490,6 +610,155 @@ def test_kafka_transport_configured_with_aliased_message_key() -> None: } +@patch.dict( + os.environ, + { + "CUSTOM_ENV_VAR": "custom_value", + "OPENLINEAGE__TRANSPORT__TYPE": "console", + "OPENLINEAGE__FACETS__ENVIRONMENT_VARIABLES": '["CUSTOM_ENV_VAR"]', + }, +) +@patch("openlineage.client.client.OpenLineageClient._resolve_transport") +def test_add_environment_facets_with_custom_env_var(mock_resolve_transport) -> None: + mock_resolve_transport.return_value = mock_transport = MagicMock() + client = OpenLineageClient() + run = Run(runId=str(generate_new_uuid())) + event = RunEvent( + eventType=RunState.START, + eventTime="2021-11-03T10:53:52.427343", + run=run, + job=Job(name="name", namespace=""), + producer="", + schemaURL="", + ) + + client.emit(event) + assert mock_transport.emit.call_args[0][0].run.facets[ + 
"environmentVariables" + ] == EnvironmentVariablesRunFacet([EnvironmentVariable(name="CUSTOM_ENV_VAR", value="custom_value")]) + + +@patch.dict( + os.environ, + { + "OPENLINEAGE__TRANSPORT__TYPE": "http", + "OPENLINEAGE__TRANSPORT__URL": "http://localhost:5050", + "OPENLINEAGE__TRANSPORT__AUTH__API_KEY": "random_token", + }, +) +@patch("openlineage.client.client.OpenLineageClient._find_yaml_config_path") +def test_config_property_loads_env_vars(mock_find_yaml) -> None: + mock_find_yaml.return_value = None + client = OpenLineageClient() + config = client.config + assert config.transport["type"] == "http" + assert config.transport["url"] == "http://localhost:5050" + assert config.transport["auth"]["api_key"] == "random_token" + + +def test_config_property_loads_user_defined_config() -> None: + user_defined_config = { + "transport": { + "type": "http", + "url": "http://localhost:5050", + "auth": {"api_key": "random_token"}, + } + } + client = OpenLineageClient(config=user_defined_config) + config = client.config + assert config.transport["type"] == "http" + assert config.transport["url"] == "http://localhost:5050" + assert config.transport["auth"]["api_key"] == "random_token" + + +@patch.dict( + os.environ, + { + "OPENLINEAGE__TRANSPORT__TYPE": "http", + "OPENLINEAGE__TRANSPORT__URL": "http://localhost:5050", + }, +) +def test_client_from_empty_dict_with_dynamic_env_vars() -> None: + client = OpenLineageClient.from_dict({}) + assert client.transport.url == "http://localhost:5050" + + +@patch.dict( + os.environ, + { + "OPENLINEAGE_URL": "http://example.com", + }, +) +def test_client_from_empty_dict_with_url_env_var() -> None: + client = OpenLineageClient.from_dict({}) + assert client.transport.url == "http://example.com" + + +@patch.dict( + os.environ, + { + "OPENLINEAGE__TRANSPORT__TYPE": "http", + "OPENLINEAGE__TRANSPORT__URL": "http://localhost:5050", + }, +) +def test_client_from_facets_config_in_dict_and_env_vars() -> None: + user_defined_config = { + "facets": { + "environment_variables": ["VAR1", "VAR2"], + } + } + client = OpenLineageClient.from_dict(user_defined_config) + assert client.config.facets.environment_variables == ["VAR1", "VAR2"] + assert client.transport.url == "http://localhost:5050" + + +@patch.dict( + os.environ, + { + "OPENLINEAGE__FACETS__ENVIRONMENT_VARIABLES": '["VAR1", "VAR2"]', + }, +) +def test_client_from_facets_config_in_env_vars_and_transport_in_config() -> None: + user_defined_config = { + "transport": { + "type": "http", + "url": "http://localhost:5050", + } + } + client = OpenLineageClient.from_dict(user_defined_config) + assert client.config.facets.environment_variables == ["VAR1", "VAR2"] + assert client.transport.url == "http://localhost:5050" + + +@patch.dict( + os.environ, + { + "OPENLINEAGE__TRANSPORT__AUTH__API_KEY": "random_token", + }, +) +@patch("openlineage.client.client.OpenLineageClient._find_yaml_config_path") +@patch("openlineage.client.client.OpenLineageClient._get_config_file_content") +def test_config_merge_precedence(mock_get_config_content, mock_find_yaml) -> None: + user_defined_config = { + "transport": { + "type": "http", + "url": "http://localhost:5050", + } + } + mock_find_yaml.return_value = "config.yml" + mock_get_config_content.return_value = { + "transport": { + "url": "http://another.host:5050", + "auth": {"api_key": "another_token"}, + } + } + client = OpenLineageClient.from_dict(user_defined_config) + config = client.config + assert config.transport["type"] == "http" + assert config.transport["url"] == "http://localhost:5050" 
+ assert config.transport["auth"]["api_key"] == "another_token" + + class TestOpenLineageConfigLoader: @pytest.mark.parametrize( ("env_vars", "expected_config"), diff --git a/client/python/tests/test_utils.py b/client/python/tests/test_utils.py index 81a2231375..f52c40fd81 100644 --- a/client/python/tests/test_utils.py +++ b/client/python/tests/test_utils.py @@ -3,7 +3,7 @@ from __future__ import annotations import pytest -from openlineage.client.utils import import_from_string, try_import_from_string +from openlineage.client.utils import deep_merge_dicts, import_from_string, try_import_from_string def test_import_from_string(): @@ -28,3 +28,38 @@ def test_try_import_from_string(): def test_try_import_from_string_unknown(): result = try_import_from_string("openlineage.client.non-existing-module") assert result is None + + +def test_deep_merge_dicts_simple(): + dict1 = {"a": 1, "b": 2} + dict2 = {"b": 3, "c": 4} + expected = {"a": 1, "b": 3, "c": 4} + assert deep_merge_dicts(dict1, dict2) == expected + + +def test_deep_merge_dicts_nested(): + dict1 = {"a": {"x": 1}, "b": 2} + dict2 = {"a": {"y": 2}, "b": 3, "c": 4} + expected = {"a": {"x": 1, "y": 2}, "b": 3, "c": 4} + assert deep_merge_dicts(dict1, dict2) == expected + + +def test_deep_merge_dicts_overwrite(): + dict1 = {"a": {"x": 1}} + dict2 = {"a": {"x": 2}} + expected = {"a": {"x": 2}} + assert deep_merge_dicts(dict1, dict2) == expected + + +def test_deep_merge_dicts_non_dict_values(): + dict1 = {"a": 1, "b": {"x": 1}} + dict2 = {"b": 2, "c": 3} + expected = {"a": 1, "b": 2, "c": 3} + assert deep_merge_dicts(dict1, dict2) == expected + + +def test_deep_merge_dicts_empty_dicts(): + dict1 = {} + dict2 = {"a": 1} + expected = {"a": 1} + assert deep_merge_dicts(dict1, dict2) == expected diff --git a/spec/facets/EnvironmentVariablesRunFacet.json b/spec/facets/EnvironmentVariablesRunFacet.json new file mode 100644 index 0000000000..95bf3d2e8d --- /dev/null +++ b/spec/facets/EnvironmentVariablesRunFacet.json @@ -0,0 +1,47 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://openlineage.io/spec/facets/1-0-0/EnvironmentVariablesRunFacet.json", + "$defs": { + "EnvironmentVariable": { + "type": "object", + "properties": { + "name": { + "description": "The name of the environment variable.", + "type": "string" + }, + "value": { + "description": "The value of the environment variable.", + "type": "string" + } + }, + "required": ["name", "value"] + }, + "EnvironmentVariablesRunFacet": { + "allOf": [ + { + "$ref": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunFacet" + }, + { + "type": "object", + "properties": { + "environmentVariables": { + "description": "The environment variables for the run.", + "type": "array", + "items": { + "$ref": "#/$defs/EnvironmentVariable" + } + } + }, + "required": ["environmentVariables"] + } + ], + "type": "object" + } + }, + "type": "object", + "properties": { + "environmentVariables": { + "$ref": "#/$defs/EnvironmentVariablesRunFacet" + } + } +} diff --git a/website/docs/client/python.md b/website/docs/client/python.md index 60087a742c..1f80060307 100644 --- a/website/docs/client/python.md +++ b/website/docs/client/python.md @@ -538,6 +538,32 @@ To implement a custom transport, follow the instructions in [`transport.py`](htt The `type` property (required) must be a fully qualified class name that can be imported. 
+## Environment Variables Run Facet + +To include specific environment variables in OpenLineage events, the `OpenLineageClient` can add them as a facet called `EnvironmentVariablesRunFacet`. This feature allows you to specify which environment variables should be collected and attached to each emitted event. + +To enable this, configure the `environment_variables` option within the `facets` section of your `OpenLineageClient` configuration. + + + + +```yaml +facets: + environment_variables: + - VAR1 + - VAR2 +``` + + + + +```sh +OPENLINEAGE__FACETS__ENVIRONMENT_VARIABLES='["VAR1", "VAR2"]' +``` + + + + ## Getting Started To try out the client, follow the steps below to install and explore OpenLineage, Marquez (the reference implementation of OpenLineage), and the client itself. Then, the instructions will show you how to use these tools to add a run event and datasets to an existing namespace. diff --git a/website/docs/spec/facets/run-facets/environment_variables.md b/website/docs/spec/facets/run-facets/environment_variables.md new file mode 100644 index 0000000000..b142986f71 --- /dev/null +++ b/website/docs/spec/facets/run-facets/environment_variables.md @@ -0,0 +1,13 @@ +--- +sidebar_position: 6 +--- + +# Environment Variables Run Facet +The Environment Variables Run Facet provides detailed information about the environment variables that were set during the execution of a job. This facet is useful for capturing the runtime environment configuration, which can be used for categorizing and filtering jobs based on their environment settings. + +| Property | Description | Type | Example | Required | +|-----------------------|-----------------------------------------------------------------------------|--------|---------------------------|----------| +| name | The name of the environment variable. This helps in identifying the specific environment variable used during the job run. | string | "JAVA_HOME" | Yes | +| value | The value of the environment variable. This captures the actual value set for the environment variable during the job run. | string | "/usr/lib/jvm/java-11" | Yes | + +The facet specification can be found [here](https://openlineage.io/spec/facets/1-0-0/EnvironmentVariablesRunFacet.json). \ No newline at end of file diff --git a/website/docs/spec/facets/run-facets/processing_engine.md b/website/docs/spec/facets/run-facets/processing_engine.md new file mode 100644 index 0000000000..86e857e3ac --- /dev/null +++ b/website/docs/spec/facets/run-facets/processing_engine.md @@ -0,0 +1,16 @@ +--- +sidebar_position: 5 +--- + +# Processing Engine Run Facet +The Processing Engine Run Facet provides detailed information about the processing engine that executed the job. This facet is commonly used to track and document the specific engine and its version, ensuring reproducibility and aiding in debugging processes. + +| Property | Description | Type | Example | Required | +|---------------------------|-----------------------------------------------------------------------------|--------|-----------|----------| +| version | The version of the processing engine, such as Airflow or Spark. This helps in identifying the exact environment in which the job was run. | string | "2.5.0" | Yes | +| name | The name of the processing engine, for example, Airflow or Spark. This is useful for categorizing and filtering jobs based on the engine used. 
| string | "Airflow" | Yes | +| openlineageAdapterVersion | The version of the OpenLineage adapter package used, such as the OpenLineage Airflow integration package version. This can be helpful for troubleshooting and ensuring compatibility. | string | "0.19.0" | No | + +Example use case: When a data pipeline job fails, the Processing Engine Run Facet can be used to quickly identify the version and type of processing engine that was used, making it easier to replicate the issue and find a solution. + +The facet specification can be found [here](https://openlineage.io/spec/facets/1-1-1/ProcessingEngineRunFacet.json#/$defs/ProcessingEngineRunFacet). \ No newline at end of file diff --git a/website/static/spec/facets/1-0-0/EnvironmentVariablesRunFacet.json b/website/static/spec/facets/1-0-0/EnvironmentVariablesRunFacet.json new file mode 100644 index 0000000000..95bf3d2e8d --- /dev/null +++ b/website/static/spec/facets/1-0-0/EnvironmentVariablesRunFacet.json @@ -0,0 +1,47 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "$id": "https://openlineage.io/spec/facets/1-0-0/EnvironmentVariablesRunFacet.json", + "$defs": { + "EnvironmentVariable": { + "type": "object", + "properties": { + "name": { + "description": "The name of the environment variable.", + "type": "string" + }, + "value": { + "description": "The value of the environment variable.", + "type": "string" + } + }, + "required": ["name", "value"] + }, + "EnvironmentVariablesRunFacet": { + "allOf": [ + { + "$ref": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunFacet" + }, + { + "type": "object", + "properties": { + "environmentVariables": { + "description": "The environment variables for the run.", + "type": "array", + "items": { + "$ref": "#/$defs/EnvironmentVariable" + } + } + }, + "required": ["environmentVariables"] + } + ], + "type": "object" + } + }, + "type": "object", + "properties": { + "environmentVariables": { + "$ref": "#/$defs/EnvironmentVariablesRunFacet" + } + } +} From 56899b4715958a66a59b0792a326beff62f72b1c Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Tue, 5 Nov 2024 13:17:13 +0100 Subject: [PATCH 45/89] docs: Upgrade docusaurus to 3.6 (#3219) Signed-off-by: Artur Owczarek --- website/blog/composite-transport/index.mdx | 8 +- website/blog/dbt-with-marquez/index.mdx | 13 +- website/blog/dynamic-env-variables/index.mdx | 30 +- website/blog/explore-lineage-api/index.mdx | 14 +- website/blog/extending-with-facets/index.mdx | 20 +- .../index.mdx | 18 +- website/blog/openlineage-spark/index.mdx | 8 +- website/docusaurus.config.js | 7 +- website/package.json | 34 +- website/plugins/home-blog-plugin.js | 34 +- website/src/css/custom.css | 44 +- website/src/theme/SearchBar/DocSearch.js | 297 - website/src/theme/SearchBar/algolia.css | 526 -- website/src/theme/SearchBar/index.js | 114 - website/src/theme/SearchBar/lunar-search.js | 147 - website/src/theme/SearchBar/styles.css | 33 - website/src/theme/SearchBar/templates.js | 112 - website/src/theme/SearchBar/utils.js | 270 - website/yarn.lock | 7511 +++++++++-------- 19 files changed, 3955 insertions(+), 5285 deletions(-) delete mode 100644 website/src/theme/SearchBar/DocSearch.js delete mode 100644 website/src/theme/SearchBar/algolia.css delete mode 100644 website/src/theme/SearchBar/index.js delete mode 100644 website/src/theme/SearchBar/lunar-search.js delete mode 100644 website/src/theme/SearchBar/styles.css delete mode 100644 website/src/theme/SearchBar/templates.js delete mode 100644 website/src/theme/SearchBar/utils.js diff --git 
a/website/blog/composite-transport/index.mdx b/website/blog/composite-transport/index.mdx index ed33e88a09..c992153740 100644 --- a/website/blog/composite-transport/index.mdx +++ b/website/blog/composite-transport/index.mdx @@ -12,18 +12,18 @@ In the world of data lineage, it's essential to have a reliable and flexible way -# What is the Composite Transport? +## What is the Composite Transport? The Composite Transport is a type of transport in OpenLineage that enables you to send events to multiple destinations sequentially. This is useful when you need to send events to multiple targets, such as a logging system and an API endpoint, one after another in a defined order. -## Key Features of the Composite Transport +### Key Features of the Composite Transport * Flexibility: The Composite Transport can include a variety of other transport types, such as HTTP, Kafka, and more, allowing you to choose the best transport for each destination. * Convenience: The Composite Transport allows you to configure multiple transports in a single configuration, making it easy to set up and manage your event emission process. * Redundancy: With the Composite Transport, you can send events to multiple destinations for redundancy, ensuring that your events are delivered even if one destination fails. * Different processing: The Composite Transport allows you to send events to different destinations for different types of processing, such as logging and analytics. -# How to Use the Composite Transport +## How to Use the Composite Transport To use the Composite Transport, you can configure it in two formats: a list of transport configurations or a map of transport configurations. The list format is useful when you have a fixed set of transports, while the map format is useful when you need to configure transports dynamically. @@ -107,7 +107,7 @@ transport: -# Conclusion +## Conclusion The Composite Transport enhances event distribution in OpenLineage by providing flexibility, redundancy, and ease of configuration. Whether you need to send events to multiple targets or ensure reliable delivery in the event of failure, this feature simplifies your workflow. diff --git a/website/blog/dbt-with-marquez/index.mdx b/website/blog/dbt-with-marquez/index.mdx index eb41602449..f47856bb0b 100644 --- a/website/blog/dbt-with-marquez/index.mdx +++ b/website/blog/dbt-with-marquez/index.mdx @@ -8,9 +8,6 @@ Each time dbt runs, it generates a trove of metadata about datasets and the work -```toc -``` - The first time I built a data warehouse was in a completely different era, even though it wasn’t all that long ago. It was a few dozen tables + a collection of loader scripts and an ETL tool. If I’m honest, calling the whole thing a “data warehouse” is a bit grandiose, but it worked. At the time, my defining question was “how can I make all of my most important data available for study without spending more than it’s worth?” Because my database capacity wasn’t infinite, I couldn’t keep all of my data forever. The jobs I wrote would pull data from operational data stores, perform a bunch of slicing and aggregation, and load summary data into the warehouse. They shoveled bits every night from one server to another, performing calculations in between - and that meant they had to run on a beefy server with close proximity to my data. 
@@ -21,7 +18,7 @@ Cloud data warehouses like [Google BigQuery](https://cloud.google.com/bigquery/) Fortunately, each time dbt runs it generates a trove of metadata about datasets and the work it performs with them. In this post, I’d like to show you how to harvest this metadata and put it to good use. -# Our Example +## Our Example For our example, let’s choose the kind of experiment that I might run in my day-to-day life. I’m the head of marketing at [Datakin](https://datakin.com), which means the metrics I’m most interested in are usually about some sort of human behavior. @@ -41,7 +38,7 @@ So let’s see what we can learn from the Stack Overflow [public data set in Big This is exactly the kind of experiment I have run multiple times over the years, across numerous stacks. It’s usually pretty messy. But this time, after running all of these models, we will be rewarded with a gorgeous [Marquez](https://marquezproject.ai/) lineage graph. We’ll be able to see how everything fits together. -# Setting Everything Up +## Setting Everything Up First, if you haven’t already, run through the excellent [dbt tutorial](https://docs.getdbt.com/tutorial/setting-up). It will show you how to create a BigQuery project, provision a service account, download a JSON key, and set up your local dbt environment. The rest of this example assumes that you have created a BigQuery project where our models can be run, and you know how to properly configure dbt to connect to it. @@ -116,7 +113,7 @@ Connection: Connection test: OK connection ok ``` -# A Few Important Details +## A Few Important Details There are a couple of considerations to make when designing dbt models for use with OpenLineage. By following these conventions, you can help OpenLineage collect the most complete metadata possible. @@ -145,7 +142,7 @@ select * from {{ source('stackoverflow', 'posts_answers') }} where parent_id in (select id from {{ ref('filtered_questions') }} ) ``` -# Performing a Run +## Performing a Run Okay! We are ready to perform a run. Before we do, though, there’s one last step we need to take. @@ -201,7 +198,7 @@ Emitted 16 openlineage events Note the output showing the number of OpenLineage events emitted to Marquez. -# Reviewing the Output +## Reviewing the Output If everything ran successfully you should be able to see a list of jobs when you navigate to http://localhost:3000. Upon clicking a job, you will see a lineage graph that looks similar to this: diff --git a/website/blog/dynamic-env-variables/index.mdx b/website/blog/dynamic-env-variables/index.mdx index 68b855f7bf..cdbfd24294 100644 --- a/website/blog/dynamic-env-variables/index.mdx +++ b/website/blog/dynamic-env-variables/index.mdx @@ -18,36 +18,36 @@ In this guide, we'll explore how to leverage dynamic environment variables to si -# What's New in 1.23.0 +## What's New in 1.23.0 In the upcoming **1.23.0 release** of OpenLineage, we're excited to introduce dynamic environment variables, which will revolutionize the way you configure your OpenLineage client. With this feature, you'll be able to simplify your configuration, improve maintainability, and adapt to changing environments with ease. -# Why Dynamic Environment Variables? +## Why Dynamic Environment Variables? -### Flexibility on a Whole New Level +#### Flexibility on a Whole New Level Stop worrying about reconfiguring code or files—dynamic environment variables let you adjust settings on the fly. This feature is perfect for developers, testers, and production environments that demand real-time adaptability. 
-### Centralized Management at Your Fingertips +#### Centralized Management at Your Fingertips With environment variables, you can securely store configuration details alongside other essential application settings. No more scattering information across multiple files—it’s all in one place. -### Cleaner Code, Happier Developers +#### Cleaner Code, Happier Developers Say goodbye to the clutter of parsing configuration files in your codebase. Dynamic environment variables lead to cleaner, more maintainable applications, saving you time and headaches. -### Seamless Integration for Effortless Deployment +#### Seamless Integration for Effortless Deployment Dynamic environment variables easily sync with modern deployment tools and practices, ensuring smooth and stress-free configuration management every time. -## When to Use Dynamic Environment Variables +### When to Use Dynamic Environment Variables -### Development & Testing +#### Development & Testing Need to experiment with new configurations quickly? Dynamic environment variables let you do just that—without ever touching your code. -### Production Deployments +#### Production Deployments Keep your production environment secure and organized by managing all configuration settings with ease and efficiency. -### Dynamic Environments +#### Dynamic Environments No more manual updates! Easily adapt to rapidly changing environments with this flexible configuration approach. -## Key Features You’ll Love +### Key Features You’ll Love * **Dynamic Updates**: Modify configurations in real-time without restarting your application. * **Hierarchical Organization**: Keep your settings clear and manageable with nested sections. * **Automatic Conversions**: OpenLineage automatically converts variable names to camelCase in Java, ensuring consistency. @@ -55,7 +55,7 @@ No more manual updates! Easily adapt to rapidly changing environments with this * **Effortless Integration**: Tie your configuration process into deployment tools and scripting languages for smooth automation. -### Quick Setup Example: HTTP Transport Configuration +#### Quick Setup Example: HTTP Transport Configuration Let’s see dynamic environment variables in action! Suppose you need to configure your client to send events to an HTTP endpoint with basic authentication. Here’s how simple it is using environment variables: @@ -99,7 +99,7 @@ transport: ``` -### Configuring CompositeTransport with Aliases for Ultimate Flexibility +#### Configuring CompositeTransport with Aliases for Ultimate Flexibility In Python, configuring CompositeTransport is a breeze with aliases for `OPENLINEAGE_URL`, `OPENLINEAGE_API_KEY`, and `OPENLINEAGE_ENDPOINT`. It allows you to create a default configuration for the default HTTP transport and easily add extra targets. @@ -122,7 +122,7 @@ OPENLINEAGE__TRANSPORT__TRANSPORTS__ANOTHER_TARGET__COMPRESSION=gzip You’ve just configured a CompositeTransport with two targets: `default_http` and `another_target`. The best part? You didn’t have to touch your config files! -### Troubleshooting +#### Troubleshooting Got issues? Here’s how to troubleshoot common problems with OpenLineage configuration: @@ -131,7 +131,7 @@ Got issues? Here’s how to troubleshoot common problems with OpenLineage config * **Client Permissions**: Ensure your OpenLineage client has access to read the environment variables, especially in containers like Docker. * **Environment Variable Verification**: Confirm all necessary environment variables are set in the correct environment. 
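As a small, hedged aid for the last two checks in the troubleshooting list above (generic debugging code, not something the OpenLineage client ships), the snippet below prints every dynamic configuration variable visible to the current process. Running it in the same container or shell that will run the client is usually enough to spot a missing value or a single-underscore typo:

```python
import os

# Print every OpenLineage-related variable this process can actually see,
# masking anything that looks like a credential. Run it from the same
# environment (container, virtualenv, CI step) that runs the client.
for name, value in sorted(os.environ.items()):
    if name.startswith("OPENLINEAGE"):
        secret = "KEY" in name or "TOKEN" in name or "PASSWORD" in name
        print(f"{name}={'****' if secret else value}")
```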
-## Wrap up: Embrace Dynamic Configuration +### Wrap up: Embrace Dynamic Configuration Dynamic environment variables offer a powerful and flexible approach to configuring the OpenLineage client. By leveraging this feature, you can streamline your setup, improve maintainability, and adapt configurations seamlessly within your dynamic environments. Say goodbye to complex configuration files and hello to a more efficient and scalable OpenLineage setup! diff --git a/website/blog/explore-lineage-api/index.mdx b/website/blog/explore-lineage-api/index.mdx index 84f31bfaa7..a69f8736c7 100644 --- a/website/blog/explore-lineage-api/index.mdx +++ b/website/blog/explore-lineage-api/index.mdx @@ -42,7 +42,7 @@ history to see how changes in one part of the map cause ripples in other areas. recent changes to the Marquez API, we’ll demonstrate how to diagnose job failures and how to explore the impact of code changes on downstream dependents. -## Getting Started +### Getting Started To get started, we need a running instance of Marquez with a little bit of seed data. For these exercises, we'll assume you have a terminal with the following programs installed @@ -73,7 +73,7 @@ seed-marquez-with-metadata exited with code 0 Once the seed job is done, we can begin exploring the API. -### The Jobs +#### The Jobs In a separate terminal window, type the following command ```bash curl "http://localhost:5000/api/v1/namespaces/food_delivery/jobs/" | jq | less @@ -169,7 +169,7 @@ The response includes the set of input and output datasets, as well as the curre ``` If a new version of the job is created, any or all of these fields can change. -### The Job Run +#### The Job Run The next thing to notice is the `latestRun` field. This includes information about the latest Run of this job: ```json @@ -215,7 +215,7 @@ The other important field to notice in the Run structure is the `state` ``` Uh-oh. Looks like the last time this job ran, it failed. -## Tracing Failures +### Tracing Failures The first question we have when diagnosing a failure is @@ -289,7 +289,7 @@ and the `outputVersions`. ``` These fields give us what we need to trace the lineage of the specific job runs we want to compare. -### Job Versions +#### Job Versions The first thing to look at is the `jobVersion`. Nearly 100% of the time, a job failure can be traced to a code change. Let's compare the job version of the failed run with the job version of the successful one: @@ -341,7 +341,7 @@ when the run completes, _even if the job run failed_. Sometimes this has no impa as the output datasets can be determined before the job run executes. But sometimes we see impacts like this where a job run failed before we had a chance to discover the output datasets. -## Tracing Upstream Lineage +### Tracing Upstream Lineage So what gives? The job code didn't actually change! So what caused the failure? @@ -350,7 +350,7 @@ Here's where the lineage tracking becomes useful. Recall again, the run output g We already know that the `outputVersions` is empty because the latest failed run didn't have a chance to determine the outputs. But we can take a look at the input datasets. 
-### Dataset Versions +#### Dataset Versions ```bash diff <(curl -s "http://localhost:5000/api/v1/jobs/runs/$FAILED_RUN_ID" | jq -r '.inputVersions') \ <(curl -s "http://localhost:5000/api/v1/jobs/runs/$SUCCESSFUL_RUN_ID" | jq -r '.inputVersions') diff --git a/website/blog/extending-with-facets/index.mdx b/website/blog/extending-with-facets/index.mdx index 99553961ea..39410577b5 100644 --- a/website/blog/extending-with-facets/index.mdx +++ b/website/blog/extending-with-facets/index.mdx @@ -8,13 +8,13 @@ Facets are a self-contained definition of one aspect of a job, dataset, or run a -# Building consensus +## Building consensus OpenLineage is an open source project, part of the LFAI&Data foundation, that standardizes lineage collection in the data ecosystem. In this increasingly rich ecosystem - that includes SQL-driven data warehouses, programmatic data processing frameworks like Spark or Pandas, and machine learning - it is near-impossible to maintain a clear and sane view of data lineage across everything without the collaboration of the ecosystem around a shared standard. Open source collaboration is a very powerful mechanism that can produce widely-adopted standard APIs. [OpenLineage draws a clear parallel with OpenTelemetry](https://openlineage.io/blog/openlineage-takes-inspiration-from-opentelemetry/) which provides a standard API to collect traces and metrics in the service world. It also draws from the experience of the Apache Parquet and Apache Arrow projects, which aim to define standard columnar data representations at-rest and in-memory. -## Open Source +### Open Source Standardizing an API through open source collaboration can be challenging. On one end, you need to get input and feedback from the people who will use the API in different contexts. On the other, you want to avoid getting stuck in disagreements arising from the different and sometimes incompatible viewpoints that inevitably drive these discussions. Thankfully, there are mechanisms to help organize and decouple those disagreements and drive discussions towards conclusion. A community driven open source project works very differently from a product you buy off the shelf. At the very moment you start using it - maybe starting by reading the doc - you become part of the community and start sharing a little bit of ownership. As with any software, you might encounter problems... but in this case, you immediately become part of the solution. In a healthy community, how much of the solution you become is entirely up to you. @@ -25,7 +25,7 @@ One of the success factors of such an open source project is how much it can min In a multi-faceted domain like data lineage, enabling others to lead discussions is critical. -## Making progress +### Making progress In this context, we need mechanisms to converge often and make incremental progress. You definitely want to avoid having a big monolithic spec that takes a long time to reach consensus on - if you ever do. A discussion around a large ultra-spec that combines specifications from multiple related domains will lose steam. We need to keep conversations focused on the topics that individual contributors care about. It is critical to subdivide the specification in concrete and granular decision points where consistent and significant progress can be made. @@ -36,7 +36,7 @@ This will keep conversations focused and moving. 
It also decouples workstreams w For example the contributors interested in data quality might be different from the ones interested in column-level lineage or query performance. -## Embracing different points of view +### Embracing different points of view Depending on their perspective, contributors may have very different opinions on how to model a certain aspect of data. Or they may have different use-cases in mind. Instead of pitting different view-points against each other and forcing alignment on every point, it is sometimes beneficial to allow them to be expressed separately. For example, when you ask a data practitioner "what is data lineage?" they may have very different definitions for it. @@ -46,7 +46,7 @@ For example, when you ask a data practitioner "what is data lineage?" they may h All those are valid view points that deserve to be captured appropriately and can be defined independently in a framework that allows them to cohabitate. -# Mechanics +## Mechanics OpenLineage is purposefully providing a faceted model around a minimalistic core spec to enable this granular decision making, minimize friction in contributing, and favor community-driven improvements. @@ -57,25 +57,25 @@ The core spec focuses on high-level modeling of jobs, runs, datasets, and their **Facets** are pieces of metadata that can be attached to those core entities. Facets have their own schema and capture various aspects of those entities. -## Facets are individual atomic specs +### Facets are individual atomic specs Like the core model, facets are defined by a `JSONSchema`. They are a self-contained definition of one aspect of a job, a dataset, or a run at the time the event happened. They make the model extensible. The notion of facets is powerful because it makes it easy to add more information to the model - you just define a new facet. There’s a clear compatibility model when introducing a new facet, since fields that are defined at the same time are grouped together. For example, there’s a facet to capture the schema of a dataset. There’s a facet to capture the version of a job in source control. There’s a facet to capture the parameters of a run. Facets are optional and may not apply to every instance of an entity. -## Facets enable specialization of models +### Facets enable specialization of models The core entities are fairly generic. A dataset might be a table in a warehouse or a topic in a Kafka broker. A job might be a SQL query or a machine learning training job. This generic high level model of lineage can be specialized by adding facets for that particular type of entity. At-rest data might be versioned, enabling transactions at the run level. Streaming data might capture the offsets and partitions where a streaming job started reading. Datasets might have a schema like a warehouse table, or not (for example, in the case of a machine learning model). By capturing a generic representation of lineage and allowing progressive specialization of those entities, this approach offers a lot of flexibility. -## Facets allow expressing different point of views +### Facets allow expressing different point of views There can be divergent points of view on how to model a certain aspect of metadata. Facets allow these models to cohabitate in a common framework. One example of this is capturing the physical plan of a query execution. Each data warehouse might have its own unique way of describing execution plans. 
It is very valuable to be able to capture both a precise (but maybe too specific) model as well as a generic (but possibly imprecise or lossy) representation. They can be captured as two different facets. This also gives us opportunities to define several competing models and use the resulting information to collaborate on a more unified and generic representation. This emergent modeling is actually extremely useful in an open source setting, and as a way to make incremental progress. -## Custom facets make the model decentralized +### Custom facets make the model decentralized Most importantly, the OpenLineage spec allows custom facets that are defined elsewhere, completely outside of the spec. This allows others to extend the spec as-needed without having to coordinate with anyone or ask any permission from a governing body. They can make their own opinionated definition of an aspect of metadata. All that is required is that they publish a `JSONSchema` that describes their facets, prefixed by a unique namespace. This lowers the barrier to experimentation and encourages incremental progress by making the experimentation of others visible. The facets that become broadly useful can eventually be represented in the core spec. -# Contribute! +## Contribute! As a community, we’ve done our best to minimize friction when experimenting with or contributing to OpenLineage. We’re looking forward to seeing you join us as we make data lineage transparent across the data ecosystem. diff --git a/website/blog/openlineage-at-northwestern-mutual/index.mdx b/website/blog/openlineage-at-northwestern-mutual/index.mdx index c36daf90e3..581d69d310 100644 --- a/website/blog/openlineage-at-northwestern-mutual/index.mdx +++ b/website/blog/openlineage-at-northwestern-mutual/index.mdx @@ -10,19 +10,19 @@ Northwestern Mutual is building an Enterprise Data Platform. In this guest blog, I joined Northwestern Mutual last year to oversee the implementation and delivery of their Enterprise Data Platform (Unified Data Platform). With over 160 years of history, Northwestern Mutual has been serving our clients with insurance and investment products, as well as financial planning, advisory and consultation services. It goes without saying that the company has accumulated a vast amount of data over this time. Our team’s objective is to empower data analysts, data scientists, and data engineers with the platform capabilities they need to derive insights and garner value from many disparate data sources. -# Ready...Set...Go! +## Ready...Set...Go! So, where do you start? The industry has taught us a lot over the past 10+ years - *remember when on-premises Hadoop clusters were all the rage*? When revisiting the approach we took within our Data Platform Engineering teams, I see a lot of alignment to the [Data Engineering Manifesto](https://connectingdots.xyz/blog/posts/2021/05/the-data-engineering-manifesto/). A few principles really jump out: -### Embrace cloud managed services +#### Embrace cloud managed services Many of the foundational needs of an Enterprise Data Platform can be accomplished using a cloud-first mindset. While we may not all agree which cloud provider is best, we can all agree that the level of scale and sophistication accomplished around things like storage, compute, and redundancy are going to be MUCH greater when relying on a cloud provider than when rolling your own solution. 
-### We are software engineers +#### We are software engineers The Data Mesh evolution has reminded the industry that centralized data teams do not scale or empower anybody. With this principle in mind, our platform teams embraced full automation from the beginning and designed for self-service workflows. We do not want to become the bottleneck to insights; rather, we want to enable data owners to manage and share their datasets throughout the company. We want to empower data engineers with transformation and machine learning capabilities, so that they can author pipelines and deliver insights. -### Aim for simplicity through consistency +#### Aim for simplicity through consistency Traditionally, data platforms have gathered and constructed technical metadata based on events of the past. For example, there are many crawlers that will examine various database systems and build a catalog to make those datasets “discoverable.” Logs from various jobs can be parsed in *extremely specific ways* to identify datasets consumed and produced by a given pipeline to infer data lineage. @@ -30,7 +30,7 @@ We viewed these traditional methods as a massive impediment to activating DataOp We also recognize and appreciate the complexity of this portion of the platform and did not find it wise to build from the ground up. Especially with the industry momentum towards real-time data observability, why add another custom solution to the stack? With such an evolving technical landscape, it was important for us to avoid vendor lock to allow us flexibility in future decisions. -# NM hearts OL +## NM hearts OL When we first learned of the OpenLineage specification, we were very intrigued and hopeful. An open specification focused on observing real-time events AND unifying tools and frameworks?!? Fast forward nine months, and we cannot believe how much capability we have developed around data observability in such a brief time. Let me back up a little... @@ -38,7 +38,7 @@ Marquez is a metadata management framework that implements the OpenLineage speci We were able to quickly deploy this framework into our own environment, which provided us with several immediate wins. -### Flexible framework +#### Flexible framework Since it is aligned with the OpenLineage framework, Marquez can process messages from ANY data producer that is publishing compliant events. The Marquez and OpenLineage communities have been doing an excellent job maturing the integration library, which allows you to tackle this challenge at the infrastructure level. This is the ultimate easy button approach and our own ideal state; configure an environment on behalf of your user base and sit back while it automatically detects and logs the activity within! @@ -46,7 +46,7 @@ In the cases when an integration either does not exist or you need to address a For example, our teams have been able to programmatically construct OpenLineage messages within code that pulls data from various on-premises database servers and publishes it into our Data Platform. Using the OpenLineage specification, we extract the actual table schema from the source system as part of the `Dataset` entity and log the executing SQL query as part of the `Job` entity. This code was simplistic and allowed us to meet our immediate needs around observing data movement and recording those event details. -### Alignment with enterprise +#### Alignment with enterprise Marquez already supported Kubernetes when we got involved, which provided us with many different deployment options. 
Our first contributions to the project were made to mature the [Helm chart](https://github.com/MarquezProject/marquez/tree/main/chart) and to enhance security around the base images and Kubernetes secrets usage. @@ -54,13 +54,13 @@ These changes allowed us to fully automate our deployments using GitOps and inco The flexibility offered by the Marquez deployment architecture and our ability to customize its details allowed us to activate new production use cases in about a month. We were happy with this timeline, given the series of security checkpoints that were validated and the wealth of functionality we had just unlocked. -### Collaborative working group +#### Collaborative working group Both the Marquez and OpenLineage communities have been extremely welcoming, and that has been a huge factor in our success at Northwestern Mutual. Our feedback and ideas have been encouraged and heard, which is evidenced by evolving project roadmaps and accepted developer contributions. We have learned quite a bit from the community members and feel fortunate to be a part of this group. Monthly community meetings are informative yet have an amazingly informal feel to them. -# Where are we headed? +## Where are we headed? The Unified Data Platform at Northwestern Mutual relies on the OpenLineage standard to formulate technical metadata within our various platform services. Publishing these events into Marquez has provided us with an effortless way to understand our running jobs. We can easily trace a downstream dataset to the job that produced it, as well as examine individual runs of that job or any preceding ones. diff --git a/website/blog/openlineage-spark/index.mdx b/website/blog/openlineage-spark/index.mdx index 87c658d677..44d850572c 100644 --- a/website/blog/openlineage-spark/index.mdx +++ b/website/blog/openlineage-spark/index.mdx @@ -8,7 +8,7 @@ Spark ushered in a brand new age of data democratization... and left us with a m -## The Age of Data Democratization +### The Age of Data Democratization In 2015, Apache Spark seemed to be taking over the world. Many of us had spent the prior few years moving our large datasets out of the Data Warehouse into "Data Lakes"- repositories of structured and unstructured data in @@ -29,7 +29,7 @@ tools to process raw data in object stores without the dependency on software en attracted to its ability to perform multiple operations on data without the I/O overhead of alternatives, like Pig or Hive, data scientists were thrilled to start piping that data through their NumPy and Pandas scripts. -## A Colossal Mess +### A Colossal Mess Of course, the natural consequence of this data democratization is that it becomes difficult to keep track of who is using the data and for what purpose. Hidden dependencies and Hyrum’s Law suddenly meant that changes to the data schema @@ -43,7 +43,7 @@ both producers and consumers of data about the state of that data and the potent bugs. Naturally, support for Apache Spark seemed like a good idea and, while the Spark 2.4 branch has been supported for some time, the recent OpenLineage 0.3 release has explicit support for Spark 3.1. 🎉 -## Getting Started +### Getting Started Our approach to integrating with Spark is not super novel nor is it complicated to integrate into your own system. Spark has had a SparkListener interface since before the 1.x days. 
If you're a heavy Spark user, it's likely you're already @@ -60,7 +60,7 @@ spark.openlineage.transport.url {your.openlineage.url} spark.openlineage.transport.type 'http' spark.openlineage.namespace {your.openlineage.namespace} ``` -## The Demo +### The Demo Trying out the Spark integration is super easy if you already have Docker Desktop and git installed. To follow along with this demo, you’ll also need a Google Cloud account and a Service Account JSON key file for an account that has diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index f97b4c304c..d0db47d033 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -1,8 +1,7 @@ // @ts-check // Note: type annotations allow type checking and IDEs autocompletion -const lightCodeTheme = require('prism-react-renderer/themes/github'); -const darkCodeTheme = require('prism-react-renderer/themes/dracula'); +const prism = require('prism-react-renderer'); const links = [ { to: '/getting-started', label: 'Getting Started', position: 'left' }, @@ -130,8 +129,8 @@ const config = { ], }, prism: { - theme: lightCodeTheme, - darkTheme: darkCodeTheme, + theme: prism.themes.github, + darkTheme: prism.themes.dracula, additionalLanguages: ['java'], }, colorMode: { diff --git a/website/package.json b/website/package.json index 203a2af42c..ffc98bb9e9 100644 --- a/website/package.json +++ b/website/package.json @@ -17,31 +17,31 @@ }, "dependencies": { "@babel/runtime": "^7.23.5", - "@docusaurus/core": "^2.4.0", - "@docusaurus/preset-classic": "^2.4.0", - "@docusaurus/theme-common": "^2.4.0", - "@emotion/react": "^11.11.1", - "@emotion/styled": "^11.11.0", + "@docusaurus/core": "^3.6.0", + "@docusaurus/plugin-content-blog": "^3.6.0", + "@docusaurus/preset-classic": "^3.6.0", + "@docusaurus/theme-common": "^3.6.0", + "@emotion/react": "^11.13.3", + "@emotion/styled": "^11.13.0", "@fontsource/roboto": "^5.0.8", - "@mdx-js/mdx": "^2.3.0", - "@mdx-js/react": "^1.6.22", + "@mdx-js/mdx": "^3.1.0", + "@mdx-js/react": "^3.1.0", "@mui/icons-material": "^5.14.19", "@mui/material": "^5.14.18", - "classnames": "^2.3.2", - "clsx": "^1.2.0", - "docusaurus-lunr-search": "^2.3.2", - "prism-react-renderer": "^1.3.5", - "react": "^17.0.2", - "react-dom": "^17.0.2", + "classnames": "^2.5.1", + "docusaurus-lunr-search": "^3.5.0", + "prism-react-renderer": "^2.4.0", + "react": "^18.3.1", + "react-dom": "^18.3.1", "react-feather": "^2.0.9", - "react-markdown": "^8.0.5", + "react-markdown": "^9.0.1", "tailwind-theme-switcher": "^1.0.2", "tailwindcss": "^2.2.16" }, "devDependencies": { - "@docusaurus/module-type-aliases": "^2.4.0", - "@tsconfig/docusaurus": "^1.0.5", - "typescript": "^4.7.4" + "@docusaurus/module-type-aliases": "^3.6.0", + "@tsconfig/docusaurus": "^2.0.3", + "typescript": "^5.6.3" }, "browserslist": { "production": [ diff --git a/website/plugins/home-blog-plugin.js b/website/plugins/home-blog-plugin.js index b13e51c8bc..ce4be24453 100644 --- a/website/plugins/home-blog-plugin.js +++ b/website/plugins/home-blog-plugin.js @@ -12,53 +12,21 @@ async function blogPluginExtended(...pluginArgs) { * Override the default `contentLoaded` hook to access blog posts data */ contentLoaded: async function (data) { - const blogPosts = data.allContent['docusaurus-plugin-content-blog'].default.blogPosts; - // Get the 6 latest blog posts - const recentPosts = [...blogPosts].splice(0, 6); - + // serve main page from / and /openlineage-site data.actions.addRoute({ path: "/", exact: true, // The component to use for the "Home" page route 
component: "@site/src/pages/home.tsx", - - // These are the props that will be passed to our "Home" page component - modules: { - recentPosts: recentPosts.map((post) => ({ - content: { - __import: true, - // The markdown file for the blog post will be loaded by webpack - path: post.metadata.source, - query: { - truncated: true, - }, - }, - })), - }, }); - // serve main page from / and /openlineage-site data.actions.addRoute({ path: "/openlineage-site", exact: true, // The component to use for the "Home" page route component: "@site/src/pages/home.tsx", - - // These are the props that will be passed to our "Home" page component - modules: { - recentPosts: recentPosts.map((post) => ({ - content: { - __import: true, - // The markdown file for the blog post will be loaded by webpack - path: post.metadata.source, - query: { - truncated: true, - }, - }, - })), - }, }); // Call the default overridden `contentLoaded` implementation diff --git a/website/src/css/custom.css b/website/src/css/custom.css index 5583fe950b..6539713471 100644 --- a/website/src/css/custom.css +++ b/website/src/css/custom.css @@ -253,87 +253,87 @@ code .token-line { @apply w-full; } -#post-content p { +article div p { @apply my-3; } -#post-content h1, h2, h3, h4, h5, h6 { +article div h1, h2, h3, h4, h5, h6 { @apply text-color-2 my-8; } -#post-content h1 { +article div h1 { @apply text-5xl; } -#post-content h2 { +article div h2 { @apply text-4xl; } -#post-content h3 { +article div h3 { @apply text-3xl; } -#post-content h4 { +article div h4 { @apply text-2xl; } -#post-content h5 { +article div h5 { @apply text-xl; } -#post-content h5 { +article div h5 { @apply text-lg; } -#post-content hr { +article div hr { @apply my-4 border-medium; } -#post-content ul { +article div ul { list-style: disc; } -#post-content ol { +article div ol { list-style: decimal; } -#post-content ul, #post-content ol { +article div ul, article div ol { @apply ml-4; } -#post-content ul ul, #post-content ol ol { +article div ul ul, article div ol ol { @apply ml-3; } -#post-content li { +article div li { @apply my-2; margin-left: 1.5rem; } -#post-content table { +article div table { @apply w-full my-6; } -#post-content table tr { +article div table tr { border-bottom: 2px solid #eee; @apply border-medium; } -.theme-dark #post-content table tr { +.theme-dark article div table tr { border-bottom: 2px solid #444; } -#post-content table tr td, #post-content table tr th { +article div table tr td, article div table tr th { @apply p-2 py-3 text-left; } -#post-content blockquote{ +article div blockquote{ @apply border-l-4 border-primary p-3; padding: 0rem 0.75rem; margin: 1.5rem 0; } -#post-content iframe { +article div iframe { max-width: 100%; } @@ -368,11 +368,11 @@ code .token-line { } } -#post-content a { +article div a { @apply text-color-2 my-4; } -#post-content pre { +article div pre { padding: 1rem; margin: 1rem; } diff --git a/website/src/theme/SearchBar/DocSearch.js b/website/src/theme/SearchBar/DocSearch.js deleted file mode 100644 index ec627d480d..0000000000 --- a/website/src/theme/SearchBar/DocSearch.js +++ /dev/null @@ -1,297 +0,0 @@ -import Hogan from "hogan.js"; -import LunrSearchAdapter from "./lunar-search"; -import autocomplete from "autocomplete.js"; -import templates from "./templates"; -import utils from "./utils"; -import $ from "autocomplete.js/zepto"; - -class DocSearch { - constructor({ - searchDocs, - searchIndex, - inputSelector, - debug = false, - baseUrl = '/', - queryDataCallback = null, - autocompleteOptions = { - debug: false, - 
hint: false, - autoselect: true - }, - transformData = false, - queryHook = false, - handleSelected = false, - enhancedSearchInput = false, - layout = "collumns" - }) { - this.input = DocSearch.getInputFromSelector(inputSelector); - this.queryDataCallback = queryDataCallback || null; - const autocompleteOptionsDebug = - autocompleteOptions && autocompleteOptions.debug - ? autocompleteOptions.debug - : false; - // eslint-disable-next-line no-param-reassign - autocompleteOptions.debug = debug || autocompleteOptionsDebug; - this.autocompleteOptions = autocompleteOptions; - this.autocompleteOptions.cssClasses = - this.autocompleteOptions.cssClasses || {}; - this.autocompleteOptions.cssClasses.prefix = - this.autocompleteOptions.cssClasses.prefix || "ds"; - const inputAriaLabel = - this.input && - typeof this.input.attr === "function" && - this.input.attr("aria-label"); - this.autocompleteOptions.ariaLabel = - this.autocompleteOptions.ariaLabel || inputAriaLabel || "search input"; - - this.isSimpleLayout = layout === "simple"; - - this.client = new LunrSearchAdapter(searchDocs, searchIndex, baseUrl); - - if (enhancedSearchInput) { - this.input = DocSearch.injectSearchBox(this.input); - } - this.autocomplete = autocomplete(this.input, autocompleteOptions, [ - { - source: this.getAutocompleteSource(transformData, queryHook), - templates: { - suggestion: DocSearch.getSuggestionTemplate(this.isSimpleLayout), - footer: templates.footer, - empty: DocSearch.getEmptyTemplate() - } - } - ]); - - const customHandleSelected = handleSelected; - this.handleSelected = customHandleSelected || this.handleSelected; - - // We prevent default link clicking if a custom handleSelected is defined - if (customHandleSelected) { - $(".algolia-autocomplete").on("click", ".ds-suggestions a", event => { - event.preventDefault(); - }); - } - - this.autocomplete.on( - "autocomplete:selected", - this.handleSelected.bind(null, this.autocomplete.autocomplete) - ); - - this.autocomplete.on( - "autocomplete:shown", - this.handleShown.bind(null, this.input) - ); - - if (enhancedSearchInput) { - DocSearch.bindSearchBoxEvent(); - } - } - - static injectSearchBox(input) { - input.before(templates.searchBox); - const newInput = input - .prev() - .prev() - .find("input"); - input.remove(); - return newInput; - } - - static bindSearchBoxEvent() { - $('.searchbox [type="reset"]').on("click", function () { - $("input#docsearch").focus(); - $(this).addClass("hide"); - autocomplete.autocomplete.setVal(""); - }); - - $("input#docsearch").on("keyup", () => { - const searchbox = document.querySelector("input#docsearch"); - const reset = document.querySelector('.searchbox [type="reset"]'); - reset.className = "searchbox__reset"; - if (searchbox.value.length === 0) { - reset.className += " hide"; - } - }); - } - - /** - * Returns the matching input from a CSS selector, null if none matches - * @function getInputFromSelector - * @param {string} selector CSS selector that matches the search - * input of the page - * @returns {void} - */ - static getInputFromSelector(selector) { - const input = $(selector).filter("input"); - return input.length ? $(input[0]) : null; - } - - /** - * Returns the `source` method to be passed to autocomplete.js. It will query - * the Algolia index and call the callbacks with the formatted hits. 
- * @function getAutocompleteSource - * @param {function} transformData An optional function to transform the hits - * @param {function} queryHook An optional function to transform the query - * @returns {function} Method to be passed as the `source` option of - * autocomplete - */ - getAutocompleteSource(transformData, queryHook) { - return (query, callback) => { - if (queryHook) { - // eslint-disable-next-line no-param-reassign - query = queryHook(query) || query; - } - this.client.search(query).then(hits => { - if ( - this.queryDataCallback && - typeof this.queryDataCallback == "function" - ) { - this.queryDataCallback(hits); - } - if (transformData) { - hits = transformData(hits) || hits; - } - callback(DocSearch.formatHits(hits)); - }); - }; - } - - // Given a list of hits returned by the API, will reformat them to be used in - // a Hogan template - static formatHits(receivedHits) { - const clonedHits = utils.deepClone(receivedHits); - const hits = clonedHits.map(hit => { - if (hit._highlightResult) { - // eslint-disable-next-line no-param-reassign - hit._highlightResult = utils.mergeKeyWithParent( - hit._highlightResult, - "hierarchy" - ); - } - return utils.mergeKeyWithParent(hit, "hierarchy"); - }); - - // Group hits by category / subcategory - let groupedHits = utils.groupBy(hits, "lvl0"); - $.each(groupedHits, (level, collection) => { - const groupedHitsByLvl1 = utils.groupBy(collection, "lvl1"); - const flattenedHits = utils.flattenAndFlagFirst( - groupedHitsByLvl1, - "isSubCategoryHeader" - ); - groupedHits[level] = flattenedHits; - }); - groupedHits = utils.flattenAndFlagFirst(groupedHits, "isCategoryHeader"); - - // Translate hits into smaller objects to be send to the template - return groupedHits.map(hit => { - const url = DocSearch.formatURL(hit); - const category = utils.getHighlightedValue(hit, "lvl0"); - const subcategory = utils.getHighlightedValue(hit, "lvl1") || category; - const displayTitle = utils - .compact([ - utils.getHighlightedValue(hit, "lvl2") || subcategory, - utils.getHighlightedValue(hit, "lvl3"), - utils.getHighlightedValue(hit, "lvl4"), - utils.getHighlightedValue(hit, "lvl5"), - utils.getHighlightedValue(hit, "lvl6") - ]) - .join( - '' - ); - const text = utils.getSnippetedValue(hit, "content"); - const isTextOrSubcategoryNonEmpty = - (subcategory && subcategory !== "") || - (displayTitle && displayTitle !== ""); - const isLvl1EmptyOrDuplicate = - !subcategory || subcategory === "" || subcategory === category; - const isLvl2 = - displayTitle && displayTitle !== "" && displayTitle !== subcategory; - const isLvl1 = - !isLvl2 && - (subcategory && subcategory !== "" && subcategory !== category); - const isLvl0 = !isLvl1 && !isLvl2; - - return { - isLvl0, - isLvl1, - isLvl2, - isLvl1EmptyOrDuplicate, - isCategoryHeader: hit.isCategoryHeader, - isSubCategoryHeader: hit.isSubCategoryHeader, - isTextOrSubcategoryNonEmpty, - category, - subcategory, - title: displayTitle, - text, - url - }; - }); - } - - static formatURL(hit) { - const { url, anchor } = hit; - if (url) { - const containsAnchor = url.indexOf("#") !== -1; - if (containsAnchor) return url; - else if (anchor) return `${hit.url}#${hit.anchor}`; - return url; - } else if (anchor) return `#${hit.anchor}`; - /* eslint-disable */ - console.warn("no anchor nor url for : ", JSON.stringify(hit)); - /* eslint-enable */ - return null; - } - - static getEmptyTemplate() { - return args => Hogan.compile(templates.empty).render(args); - } - - static getSuggestionTemplate(isSimpleLayout) { - const stringTemplate 
= isSimpleLayout - ? templates.suggestionSimple - : templates.suggestion; - const template = Hogan.compile(stringTemplate); - return suggestion => template.render(suggestion); - } - - handleSelected(input, event, suggestion, datasetNumber, context = {}) { - // Do nothing if click on the suggestion, as it's already a , the - // browser will take care of it. This allow Ctrl-Clicking on results and not - // having the main window being redirected as well - if (context.selectionMethod === "click") { - return; - } - - input.setVal(""); - window.location.assign(suggestion.url); - } - - handleShown(input) { - const middleOfInput = input.offset().left + input.width() / 2; - let middleOfWindow = $(document).width() / 2; - - if (isNaN(middleOfWindow)) { - middleOfWindow = 900; - } - - const alignClass = - middleOfInput - middleOfWindow >= 0 - ? "algolia-autocomplete-right" - : "algolia-autocomplete-left"; - const otherAlignClass = - middleOfInput - middleOfWindow < 0 - ? "algolia-autocomplete-right" - : "algolia-autocomplete-left"; - const autocompleteWrapper = $(".algolia-autocomplete"); - if (!autocompleteWrapper.hasClass(alignClass)) { - autocompleteWrapper.addClass(alignClass); - } - - if (autocompleteWrapper.hasClass(otherAlignClass)) { - autocompleteWrapper.removeClass(otherAlignClass); - } - } -} - -export default DocSearch; diff --git a/website/src/theme/SearchBar/algolia.css b/website/src/theme/SearchBar/algolia.css deleted file mode 100644 index c5590cfa48..0000000000 --- a/website/src/theme/SearchBar/algolia.css +++ /dev/null @@ -1,526 +0,0 @@ -/* Bottom border of each suggestion */ -.algolia-docsearch-suggestion { - border-bottom-color: #3a3dd1; -} -/* Main category headers */ -.algolia-docsearch-suggestion--category-header { - background-color: #4b54de; -} -/* Highlighted search terms */ -.algolia-docsearch-suggestion--highlight { - color: #3a33d1; -} -/* Highligted search terms in the main category headers */ -.algolia-docsearch-suggestion--category-header - .algolia-docsearch-suggestion--highlight { - background-color: #4d47d5; -} -/* Currently selected suggestion */ -.aa-cursor .algolia-docsearch-suggestion--content { - color: #272296; -} -.aa-cursor .algolia-docsearch-suggestion { - background: #ebebfb; -} - -/* For bigger screens, when displaying results in two columns */ -@media (min-width: 768px) { - /* Bottom border of each suggestion */ - .algolia-docsearch-suggestion { - border-bottom-color: #7671df; - } - /* Left column, with secondary category header */ - .algolia-docsearch-suggestion--subcategory-column { - border-right-color: #7671df; - color: #4e4726; - } -} - -.searchbox { - display: inline-block; - position: relative; - width: 200px; - height: 32px !important; - white-space: nowrap; - box-sizing: border-box; - visibility: visible !important; -} - -.searchbox .algolia-autocomplete { - display: block; - width: 100%; - height: 100%; -} - -.searchbox__wrapper { - width: 100%; - height: 100%; - z-index: 999; - position: relative; -} - -.searchbox__input { - display: inline-block; - box-sizing: border-box; - -webkit-transition: box-shadow 0.4s ease, background 0.4s ease; - transition: box-shadow 0.4s ease, background 0.4s ease; - border: 0; - border-radius: 16px; - box-shadow: inset 0 0 0 1px #cccccc; - background: #ffffff !important; - padding: 0; - padding-right: 26px; - padding-left: 32px; - width: 100%; - height: 100%; - vertical-align: middle; - white-space: normal; - font-size: 12px; - -webkit-appearance: none; - -moz-appearance: none; - appearance: none; -} - 
-.searchbox__input::-webkit-search-decoration, -.searchbox__input::-webkit-search-cancel-button, -.searchbox__input::-webkit-search-results-button, -.searchbox__input::-webkit-search-results-decoration { - display: none; -} - -.searchbox__input:hover { - box-shadow: inset 0 0 0 1px #b3b3b3; -} - -.searchbox__input:focus, -.searchbox__input:active { - outline: 0; - box-shadow: inset 0 0 0 1px #aaaaaa; - background: #ffffff; -} - -.searchbox__input::-webkit-input-placeholder { - color: #aaaaaa; -} - -.searchbox__input::-moz-placeholder { - color: #aaaaaa; -} - -.searchbox__input:-ms-input-placeholder { - color: #aaaaaa; -} - -.searchbox__input::placeholder { - color: #aaaaaa; -} - -.searchbox__submit { - position: absolute; - top: 0; - margin: 0; - border: 0; - border-radius: 16px 0 0 16px; - background-color: rgba(69, 142, 225, 0); - padding: 0; - width: 32px; - height: 100%; - vertical-align: middle; - text-align: center; - font-size: inherit; - -webkit-user-select: none; - -moz-user-select: none; - -ms-user-select: none; - user-select: none; - right: inherit; - left: 0; -} - -.searchbox__submit::before { - display: inline-block; - margin-right: -4px; - height: 100%; - vertical-align: middle; - content: ''; -} - -.searchbox__submit:hover, -.searchbox__submit:active { - cursor: pointer; -} - -.searchbox__submit:focus { - outline: 0; -} - -.searchbox__submit svg { - width: 14px; - height: 14px; - vertical-align: middle; - fill: #6d7e96; -} - -.searchbox__reset { - display: block; - position: absolute; - top: 8px; - right: 8px; - margin: 0; - border: 0; - background: none; - cursor: pointer; - padding: 0; - font-size: inherit; - -webkit-user-select: none; - -moz-user-select: none; - -ms-user-select: none; - user-select: none; - fill: rgba(0, 0, 0, 0.5); -} - -.searchbox__reset.hide { - display: none; -} - -.searchbox__reset:focus { - outline: 0; -} - -.searchbox__reset svg { - display: block; - margin: 4px; - width: 8px; - height: 8px; -} - -.searchbox__input:valid ~ .searchbox__reset { - display: block; - -webkit-animation-name: sbx-reset-in; - animation-name: sbx-reset-in; - -webkit-animation-duration: 0.15s; - animation-duration: 0.15s; -} - -@-webkit-keyframes sbx-reset-in { - 0% { - -webkit-transform: translate3d(-20%, 0, 0); - transform: translate3d(-20%, 0, 0); - opacity: 0; - } - 100% { - -webkit-transform: none; - transform: none; - opacity: 1; - } -} - -@keyframes sbx-reset-in { - 0% { - -webkit-transform: translate3d(-20%, 0, 0); - transform: translate3d(-20%, 0, 0); - opacity: 0; - } - 100% { - -webkit-transform: none; - transform: none; - opacity: 1; - } -} - -.algolia-autocomplete .ds-dropdown-menu:before { - display: block; - position: absolute; - content: ''; - width: 14px; - height: 14px; - background: #373940; - z-index: 1000; - top: -7px; - border-top: 1px solid #373940; - border-right: 1px solid #373940; - -webkit-transform: rotate(-45deg); - transform: rotate(-45deg); - border-radius: 2px; -} - -.algolia-autocomplete .ds-dropdown-menu { - box-shadow: 0 1px 0 0 rgba(0, 0, 0, 0.2), 0 2px 3px 0 rgba(0, 0, 0, 0.1); -} - -@media (min-width: 601px) { - .algolia-autocomplete.algolia-autocomplete-right .ds-dropdown-menu { - right: 0 !important; - left: inherit !important; - } - - .algolia-autocomplete.algolia-autocomplete-right .ds-dropdown-menu:before { - right: 48px; - } - - .algolia-autocomplete .ds-dropdown-menu { - position: relative; - top: -6px; - border-radius: 4px; - margin: 6px 0 0; - padding: 0; - text-align: left; - height: auto; - position: relative; - background: 
transparent; - border: none; - z-index: 999; - max-width: 600px; - min-width: 500px; - } -} - -@media (max-width: 600px) { - .algolia-autocomplete .ds-dropdown-menu { - z-index: 100; - position: fixed !important; - top: 50px !important; - left: auto !important; - right: 1rem !important; - width: 600px; - max-width: calc(100% - 2rem); - max-height: calc(100% - 5rem); - display: block; - } - - .algolia-autocomplete .ds-dropdown-menu:before { - right: 6rem; - } -} - -.algolia-autocomplete .ds-dropdown-menu .ds-suggestions { - position: relative; - z-index: 1000; -} - -.algolia-autocomplete .ds-dropdown-menu .ds-suggestion { - cursor: pointer; -} - -.algolia-autocomplete .ds-dropdown-menu [class^='ds-dataset-'] { - position: relative; - border-radius: 4px; - overflow: auto; - padding: 0; - background: #ffffff; -} - -.algolia-autocomplete .ds-dropdown-menu * { - box-sizing: border-box; -} - -.algolia-autocomplete .algolia-docsearch-suggestion { - display: block; - position: relative; - padding: 0; - overflow: hidden; - text-decoration: none; -} - -.algolia-autocomplete .ds-cursor .algolia-docsearch-suggestion--wrapper { - background: #f1f1f1; - box-shadow: inset -2px 0 0 #61dafb; -} - -.algolia-autocomplete .algolia-docsearch-suggestion--highlight { - background: #ffe564; - padding: 0.1em 0.05em; -} - -.algolia-autocomplete - .algolia-docsearch-suggestion--category-header - .algolia-docsearch-suggestion--category-header-lvl0 - .algolia-docsearch-suggestion--highlight, -.algolia-autocomplete - .algolia-docsearch-suggestion--category-header - .algolia-docsearch-suggestion--category-header-lvl1 - .algolia-docsearch-suggestion--highlight { - color: inherit; - background: inherit; -} - -.algolia-autocomplete - .algolia-docsearch-suggestion--text - .algolia-docsearch-suggestion--highlight { - padding: 0 0 1px; - background: inherit; - box-shadow: inset 0 -2px 0 0 rgba(69, 142, 225, 0.8); - color: inherit; -} - -.algolia-autocomplete .algolia-docsearch-suggestion--content { - display: block; - float: right; - width: 70%; - position: relative; - padding: 5.33333px 0 5.33333px 10.66667px; - cursor: pointer; -} - -.algolia-autocomplete .algolia-docsearch-suggestion--content:before { - content: ''; - position: absolute; - display: block; - top: 0; - height: 100%; - width: 1px; - background: #ececec; - left: -1px; -} - -.algolia-autocomplete .algolia-docsearch-suggestion--category-header { - position: relative; - display: none; - font-size: 14px; - letter-spacing: 0.08em; - font-weight: 700; - background-color: #373940; - text-transform: uppercase; - color: #fff; - margin: 0; - padding: 5px 8px; -} - -.algolia-autocomplete .algolia-docsearch-suggestion--wrapper { - background-color: #fff; - width: 100%; - float: left; - padding: 8px 0 0 0; -} - -.algolia-autocomplete .algolia-docsearch-suggestion--subcategory-column { - float: left; - width: 30%; - display: none; - padding-left: 0; - text-align: right; - position: relative; - padding: 5.33333px 10.66667px; - color: #777; - font-size: 0.9em; - word-wrap: break-word; -} - -.algolia-autocomplete .algolia-docsearch-suggestion--subcategory-column:before { - content: ''; - position: absolute; - display: block; - top: 0; - height: 100%; - width: 1px; - background: #ececec; - right: 0; -} - -.algolia-autocomplete - .algolia-docsearch-suggestion.algolia-docsearch-suggestion__main - .algolia-docsearch-suggestion--category-header, -.algolia-autocomplete - .algolia-docsearch-suggestion.algolia-docsearch-suggestion__secondary { - display: block; -} - 
-.algolia-autocomplete - .algolia-docsearch-suggestion--subcategory-column - .algolia-docsearch-suggestion--highlight { - background-color: inherit; - color: inherit; -} - -.algolia-autocomplete .algolia-docsearch-suggestion--subcategory-inline { - display: none; -} - -.algolia-autocomplete .algolia-docsearch-suggestion--title { - margin-bottom: 4px; - color: #02060c; - font-size: 0.9em; - font-weight: bold; -} - -.algolia-autocomplete .algolia-docsearch-suggestion--text { - display: block; - line-height: 1.2em; - font-size: 0.85em; - color: #63676d; - padding-right: 2px; -} - -.algolia-autocomplete .algolia-docsearch-suggestion--no-results { - width: 100%; - padding: 8px 0; - text-align: center; - font-size: 1.2em; - background-color: #373940; - margin-top: -8px; -} - -.algolia-autocomplete - .algolia-docsearch-suggestion--no-results - .algolia-docsearch-suggestion--text { - color: #ffffff; - margin-top: 4px; -} - -.algolia-autocomplete .algolia-docsearch-suggestion--no-results::before { - display: none; -} - -.algolia-autocomplete .algolia-docsearch-suggestion code { - padding: 1px 5px; - font-size: 90%; - border: none; - color: #222222; - background-color: #ebebeb; - border-radius: 3px; - font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New', - monospace; -} - -.algolia-autocomplete - .algolia-docsearch-suggestion - code - .algolia-docsearch-suggestion--highlight { - background: none; -} - -.algolia-autocomplete - .algolia-docsearch-suggestion.algolia-docsearch-suggestion__main - .algolia-docsearch-suggestion--category-header { - color: white; - display: block; -} - -.algolia-autocomplete - .algolia-docsearch-suggestion.algolia-docsearch-suggestion__secondary - .algolia-docsearch-suggestion--subcategory-column { - display: block; -} - -.algolia-autocomplete .algolia-docsearch-footer { - background-color: #fff; - width: 100%; - height: 30px; - z-index: 2000; - float: right; - font-size: 0; - line-height: 0; -} - -.algolia-autocomplete .algolia-docsearch-footer--logo { - background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 130 18'%3E%3Cdefs%3E%3ClinearGradient id='a' x1='-36.87%25' x2='129.43%25' y1='134.94%25' y2='-27.7%25'%3E%3Cstop stop-color='%252300AEFF' offset='0%25'/%3E%3Cstop stop-color='%25233369E7' offset='100%25'/%3E%3C/linearGradient%3E%3C/defs%3E%3Cg fill='none' fill-rule='evenodd'%3E%3Cpath fill='url(%2523a)' d='M59.4.02h13.3a2.37 2.37 0 0 1 2.38 2.37V15.6a2.37 2.37 0 0 1-2.38 2.36H59.4a2.37 2.37 0 0 1-2.38-2.36V2.38A2.37 2.37 0 0 1 59.4.02z'/%3E%3Cpath fill='%2523FFF' d='M66.26 4.56c-2.82 0-5.1 2.27-5.1 5.08 0 2.8 2.28 5.07 5.1 5.07 2.8 0 5.1-2.26 5.1-5.07 0-2.8-2.28-5.07-5.1-5.07zm0 8.65c-2 0-3.6-1.6-3.6-3.56 0-1.97 1.6-3.58 3.6-3.58 1.98 0 3.6 1.6 3.6 3.58a3.58 3.58 0 0 1-3.6 3.57zm0-6.4v2.66c0 .07.08.13.15.1l2.4-1.24c.04-.02.06-.1.03-.14a2.96 2.96 0 0 0-2.46-1.5c-.06 0-.1.05-.1.1zm-3.33-1.96l-.3-.3a.78.78 0 0 0-1.12 0l-.36.36a.77.77 0 0 0 0 1.1l.3.3c.05.05.13.04.17 0 .2-.25.4-.5.6-.7.23-.23.46-.43.7-.6.07-.04.07-.1.03-.16zm5-.8V3.4a.78.78 0 0 0-.78-.78h-1.83a.78.78 0 0 0-.78.78v.63c0 .07.06.12.14.1a5.74 5.74 0 0 1 1.58-.22c.52 0 1.04.07 1.54.2a.1.1 0 0 0 .13-.1z'/%3E%3Cpath fill='%2523182359' d='M102.16 13.76c0 1.46-.37 2.52-1.12 3.2-.75.67-1.9 1-3.44 1-.56 0-1.74-.1-2.67-.3l.34-1.7c.78.17 1.82.2 2.36.2.86 0 1.48-.16 1.84-.5.37-.36.55-.88.55-1.57v-.35a6.37 6.37 0 0 1-.84.3 4.15 4.15 0 0 1-1.2.17 4.5 4.5 0 0 1-1.6-.28 3.38 3.38 0 0 1-1.26-.82 3.74 3.74 0 0 1-.8-1.35c-.2-.54-.3-1.5-.3-2.2 0-.67.1-1.5.3-2.06a3.92 
3.92 0 0 1 .9-1.43 4.12 4.12 0 0 1 1.45-.92 5.3 5.3 0 0 1 1.94-.37c.7 0 1.35.1 1.97.2a15.86 15.86 0 0 1 1.6.33v8.46zm-5.95-4.2c0 .9.2 1.88.6 2.3.4.4.9.62 1.53.62.34 0 .66-.05.96-.15a2.75 2.75 0 0 0 .73-.33V6.7a8.53 8.53 0 0 0-1.42-.17c-.76-.02-1.36.3-1.77.8-.4.5-.62 1.4-.62 2.23zm16.13 0c0 .72-.1 1.26-.32 1.85a4.4 4.4 0 0 1-.9 1.53c-.38.42-.85.75-1.4.98-.54.24-1.4.37-1.8.37-.43 0-1.27-.13-1.8-.36a4.1 4.1 0 0 1-1.4-.97 4.5 4.5 0 0 1-.92-1.52 5.04 5.04 0 0 1-.33-1.84c0-.72.1-1.4.32-2 .22-.6.53-1.1.92-1.5.4-.43.86-.75 1.4-.98a4.55 4.55 0 0 1 1.78-.34 4.7 4.7 0 0 1 1.8.34c.54.23 1 .55 1.4.97.38.42.68.92.9 1.5.23.6.35 1.3.35 2zm-2.2 0c0-.92-.2-1.7-.6-2.22-.38-.54-.94-.8-1.64-.8-.72 0-1.27.26-1.67.8-.4.54-.58 1.3-.58 2.22 0 .93.2 1.56.6 2.1.38.54.94.8 1.64.8s1.25-.26 1.65-.8c.4-.55.6-1.17.6-2.1zm6.97 4.7c-3.5.02-3.5-2.8-3.5-3.27L113.57.92l2.15-.34v10c0 .25 0 1.87 1.37 1.88v1.8zm3.77 0h-2.15v-9.2l2.15-.33v9.54zM119.8 3.74c.7 0 1.3-.58 1.3-1.3 0-.7-.58-1.3-1.3-1.3-.73 0-1.3.6-1.3 1.3 0 .72.58 1.3 1.3 1.3zm6.43 1c.7 0 1.3.1 1.78.27.5.18.88.42 1.17.73.28.3.5.74.6 1.18.13.46.2.95.2 1.5v5.47a25.24 25.24 0 0 1-1.5.25c-.67.1-1.42.15-2.25.15a6.83 6.83 0 0 1-1.52-.16 3.2 3.2 0 0 1-1.18-.5 2.46 2.46 0 0 1-.76-.9c-.18-.37-.27-.9-.27-1.44 0-.52.1-.85.3-1.2.2-.37.48-.67.83-.9a3.6 3.6 0 0 1 1.23-.5 7.07 7.07 0 0 1 2.2-.1l.83.16v-.35c0-.25-.03-.48-.1-.7a1.5 1.5 0 0 0-.3-.58c-.15-.18-.34-.3-.58-.4a2.54 2.54 0 0 0-.92-.17c-.5 0-.94.06-1.35.13-.4.08-.75.16-1 .25l-.27-1.74c.27-.1.67-.18 1.2-.28a9.34 9.34 0 0 1 1.65-.14zm.18 7.74c.66 0 1.15-.04 1.5-.1V10.2a5.1 5.1 0 0 0-2-.1c-.23.03-.45.1-.64.2a1.17 1.17 0 0 0-.47.38c-.13.17-.18.26-.18.52 0 .5.17.8.5.98.32.2.74.3 1.3.3zM84.1 4.8c.72 0 1.3.08 1.8.26.48.17.87.42 1.15.73.3.3.5.72.6 1.17.14.45.2.94.2 1.47v5.48a25.24 25.24 0 0 1-1.5.26c-.67.1-1.42.14-2.25.14a6.83 6.83 0 0 1-1.52-.16 3.2 3.2 0 0 1-1.18-.5 2.46 2.46 0 0 1-.76-.9c-.18-.38-.27-.9-.27-1.44 0-.53.1-.86.3-1.22.2-.36.5-.65.84-.88a3.6 3.6 0 0 1 1.24-.5 7.07 7.07 0 0 1 2.2-.1c.26.03.54.08.84.15v-.35c0-.24-.03-.48-.1-.7a1.5 1.5 0 0 0-.3-.58c-.15-.17-.34-.3-.58-.4a2.54 2.54 0 0 0-.9-.15c-.5 0-.96.05-1.37.12-.4.07-.75.15-1 .24l-.26-1.75c.27-.08.67-.17 1.18-.26a8.9 8.9 0 0 1 1.66-.15zm.2 7.73c.65 0 1.14-.04 1.48-.1v-2.17a5.1 5.1 0 0 0-1.98-.1c-.24.03-.46.1-.65.18a1.17 1.17 0 0 0-.47.4c-.12.17-.17.26-.17.52 0 .5.18.8.5.98.32.2.75.3 1.3.3zm8.68 1.74c-3.5 0-3.5-2.82-3.5-3.28L89.45.92 91.6.6v10c0 .25 0 1.87 1.38 1.88v1.8z'/%3E%3Cpath fill='%25231D3657' d='M5.03 11.03c0 .7-.26 1.24-.76 1.64-.5.4-1.2.6-2.1.6-.88 0-1.6-.14-2.17-.42v-1.2c.36.16.74.3 1.14.38.4.1.78.15 1.13.15.5 0 .88-.1 1.12-.3a.94.94 0 0 0 .35-.77.98.98 0 0 0-.33-.74c-.22-.2-.68-.44-1.37-.72-.72-.3-1.22-.62-1.52-1C.23 8.27.1 7.82.1 7.3c0-.65.22-1.17.7-1.55.46-.37 1.08-.56 1.86-.56.76 0 1.5.16 2.25.48l-.4 1.05c-.7-.3-1.32-.44-1.87-.44-.4 0-.73.08-.94.26a.9.9 0 0 0-.33.72c0 .2.04.38.12.52.08.15.22.3.42.4.2.14.55.3 1.06.52.58.24 1 .47 1.27.67.27.2.47.44.6.7.12.26.18.57.18.92zM9 13.27c-.92 0-1.64-.27-2.16-.8-.52-.55-.78-1.3-.78-2.24 0-.97.24-1.73.72-2.3.5-.54 1.15-.82 2-.82.78 0 1.4.25 1.85.72.46.48.7 1.14.7 1.97v.67H7.35c0 .58.17 1.02.46 1.33.3.3.7.47 1.24.47.36 0 .68-.04.98-.1a5.1 5.1 0 0 0 .98-.33v1.02a3.87 3.87 0 0 1-.94.32 5.72 5.72 0 0 1-1.08.1zm-.22-5.2c-.4 0-.73.12-.97.38s-.37.62-.42 1.1h2.7c0-.48-.13-.85-.36-1.1-.23-.26-.54-.38-.94-.38zm7.7 5.1l-.26-.84h-.05c-.28.36-.57.6-.86.74-.28.13-.65.2-1.1.2-.6 0-1.05-.16-1.38-.48-.32-.32-.5-.77-.5-1.34 0-.62.24-1.08.7-1.4.45-.3 1.14-.47 2.07-.5l1.02-.03V9.2c0-.37-.1-.65-.27-.84-.17-.2-.45-.28-.82-.28-.3 
0-.6.04-.88.13a6.68 6.68 0 0 0-.8.33l-.4-.9a4.4 4.4 0 0 1 1.05-.4 4.86 4.86 0 0 1 1.08-.12c.76 0 1.33.18 1.7.5.4.33.6.85.6 1.56v4h-.9zm-1.9-.87c.47 0 .83-.13 1.1-.38.3-.26.43-.62.43-1.08v-.52l-.76.03c-.6.03-1.02.13-1.3.3s-.4.45-.4.82c0 .26.08.47.24.6.16.16.4.23.7.23zm7.57-5.2c.25 0 .46.03.62.06l-.12 1.18a2.38 2.38 0 0 0-.56-.06c-.5 0-.92.16-1.24.5-.3.32-.47.75-.47 1.27v3.1h-1.27V7.23h1l.16 1.05h.05c.2-.36.45-.64.77-.85a1.83 1.83 0 0 1 1.02-.3zm4.12 6.17c-.9 0-1.58-.27-2.05-.8-.47-.52-.7-1.27-.7-2.25 0-1 .24-1.77.73-2.3.5-.54 1.2-.8 2.12-.8.63 0 1.2.1 1.7.34l-.4 1c-.52-.2-.96-.3-1.3-.3-1.04 0-1.55.68-1.55 2.05 0 .67.13 1.17.38 1.5.26.34.64.5 1.13.5a3.23 3.23 0 0 0 1.6-.4v1.1a2.53 2.53 0 0 1-.73.28 4.36 4.36 0 0 1-.93.08zm8.28-.1h-1.27V9.5c0-.45-.1-.8-.28-1.02-.18-.23-.47-.34-.88-.34-.53 0-.9.16-1.16.48-.25.3-.38.85-.38 1.6v2.94h-1.26V4.8h1.26v2.12c0 .34-.02.7-.06 1.1h.08a1.76 1.76 0 0 1 .72-.67c.3-.16.66-.24 1.07-.24 1.43 0 2.15.74 2.15 2.2v3.86zM42.2 7.1c.74 0 1.32.28 1.73.82.4.53.62 1.3.62 2.26 0 .97-.2 1.73-.63 2.27-.42.54-1 .82-1.75.82s-1.33-.27-1.75-.8h-.08l-.23.7h-.94V4.8h1.26v2l-.02.64-.03.56h.05c.4-.6 1-.9 1.78-.9zm-.33 1.04c-.5 0-.88.15-1.1.45-.22.3-.34.8-.35 1.5v.08c0 .72.12 1.24.35 1.57.23.32.6.48 1.12.48.44 0 .78-.17 1-.53.24-.35.36-.87.36-1.53 0-1.35-.47-2.03-1.4-2.03zm3.24-.92h1.4l1.2 3.37c.18.47.3.92.36 1.34h.04l.18-.72 1.37-4H51l-2.53 6.73c-.46 1.23-1.23 1.85-2.3 1.85-.3 0-.56-.03-.83-.1v-1c.2.05.4.08.65.08.6 0 1.03-.36 1.28-1.06l.22-.56-2.4-5.94z'/%3E%3C/g%3E%3C/svg%3E"); - background-repeat: no-repeat; - background-position: center; - background-size: 100%; - overflow: hidden; - text-indent: -9000px; - width: 110px; - height: 100%; - display: block; - margin-left: auto; - margin-right: 5px; -} diff --git a/website/src/theme/SearchBar/index.js b/website/src/theme/SearchBar/index.js deleted file mode 100644 index 3c59678f87..0000000000 --- a/website/src/theme/SearchBar/index.js +++ /dev/null @@ -1,114 +0,0 @@ -import React, { useRef, useCallback, useState } from "react"; -import classnames from "classnames"; -import { useHistory } from "@docusaurus/router"; -import useDocusaurusContext from "@docusaurus/useDocusaurusContext"; -import { usePluginData } from '@docusaurus/useGlobalData'; -import useIsBrowser from "@docusaurus/useIsBrowser"; -const Search = props => { - const initialized = useRef(false); - const searchBarRef = useRef(null); - const [indexReady, setIndexReady] = useState(false); - const history = useHistory(); - const { siteConfig = {} } = useDocusaurusContext(); - const isBrowser = useIsBrowser(); - const { baseUrl } = siteConfig; - const initAlgolia = (searchDocs, searchIndex, DocSearch) => { - new DocSearch({ - searchDocs, - searchIndex, - baseUrl, - inputSelector: "#search_input_react", - // Override algolia's default selection event, allowing us to do client-side - // navigation and avoiding a full page refresh. - handleSelected: (_input, _event, suggestion) => { - const url = suggestion.url || "/"; - // Use an anchor tag to parse the absolute url into a relative url - // Alternatively, we can use new URL(suggestion.url) but its not supported in IE - const a = document.createElement("a"); - a.href = url; - // Algolia use closest parent element id #__docusaurus when a h1 page title does not have an id - // So, we can safely remove it. See https://github.com/facebook/docusaurus/issues/1828 for more details. 
- - history.push(url); - } - }); - }; - - const pluginData = usePluginData('docusaurus-lunr-search'); - const getSearchDoc = () => - process.env.NODE_ENV === "production" - ? fetch(`${baseUrl}${pluginData.fileNames.searchDoc}`).then((content) => content.json()) - : Promise.resolve([]); - - const getLunrIndex = () => - process.env.NODE_ENV === "production" - ? fetch(`${baseUrl}${pluginData.fileNames.lunrIndex}`).then((content) => content.json()) - : Promise.resolve([]); - - const loadAlgolia = () => { - if (!initialized.current) { - Promise.all([ - getSearchDoc(), - getLunrIndex(), - import("./DocSearch"), - import("./algolia.css") - ]).then(([searchDocs, searchIndex, { default: DocSearch }]) => { - if (searchDocs.length === 0) { - return; - } - initAlgolia(searchDocs, searchIndex, DocSearch); - setIndexReady(true); - }); - initialized.current = true; - } - }; - - const toggleSearchIconClick = useCallback( - e => { - if (!searchBarRef.current.contains(e.target)) { - searchBarRef.current.focus(); - } - - props.handleSearchBarToggle && props.handleSearchBarToggle(!props.isSearchBarExpanded); - }, - [props.isSearchBarExpanded] - ); - - if (isBrowser) { - loadAlgolia(); - } - - return ( -

- ); -}; - -export default Search; diff --git a/website/src/theme/SearchBar/lunar-search.js b/website/src/theme/SearchBar/lunar-search.js deleted file mode 100644 index 25681f4862..0000000000 --- a/website/src/theme/SearchBar/lunar-search.js +++ /dev/null @@ -1,147 +0,0 @@ -import lunr from "@generated/lunr.client"; -lunr.tokenizer.separator = /[\s\-/]+/; - -class LunrSearchAdapter { - constructor(searchDocs, searchIndex, baseUrl = '/') { - this.searchDocs = searchDocs; - this.lunrIndex = lunr.Index.load(searchIndex); - this.baseUrl = baseUrl; - } - - getLunrResult(input) { - return this.lunrIndex.query(function (query) { - const tokens = lunr.tokenizer(input); - query.term(tokens, { - boost: 10 - }); - query.term(tokens, { - wildcard: lunr.Query.wildcard.TRAILING - }); - }); - } - - getHit(doc, formattedTitle, formattedContent) { - return { - hierarchy: { - lvl0: doc.pageTitle || doc.title, - lvl1: doc.type === 0 ? null : doc.title - }, - url: doc.url, - _snippetResult: formattedContent ? { - content: { - value: formattedContent, - matchLevel: "full" - } - } : null, - _highlightResult: { - hierarchy: { - lvl0: { - value: doc.type === 0 ? formattedTitle || doc.title : doc.pageTitle, - }, - lvl1: - doc.type === 0 - ? null - : { - value: formattedTitle || doc.title - } - } - } - }; - } - getTitleHit(doc, position, length) { - const start = position[0]; - const end = position[0] + length; - let formattedTitle = doc.title.substring(0, start) + '' + doc.title.substring(start, end) + '' + doc.title.substring(end, doc.title.length); - return this.getHit(doc, formattedTitle) - } - - getKeywordHit(doc, position, length) { - const start = position[0]; - const end = position[0] + length; - let formattedTitle = doc.title + '
Keywords: ' + doc.keywords.substring(0, start) + '' + doc.keywords.substring(start, end) + '' + doc.keywords.substring(end, doc.keywords.length) + '' - return this.getHit(doc, formattedTitle) - } - - getContentHit(doc, position) { - const start = position[0]; - const end = position[0] + position[1]; - let previewStart = start; - let previewEnd = end; - let ellipsesBefore = true; - let ellipsesAfter = true; - for (let k = 0; k < 3; k++) { - const nextSpace = doc.content.lastIndexOf(' ', previewStart - 2); - const nextDot = doc.content.lastIndexOf('.', previewStart - 2); - if ((nextDot > 0) && (nextDot > nextSpace)) { - previewStart = nextDot + 1; - ellipsesBefore = false; - break; - } - if (nextSpace < 0) { - previewStart = 0; - ellipsesBefore = false; - break; - } - previewStart = nextSpace + 1; - } - for (let k = 0; k < 10; k++) { - const nextSpace = doc.content.indexOf(' ', previewEnd + 1); - const nextDot = doc.content.indexOf('.', previewEnd + 1); - if ((nextDot > 0) && (nextDot < nextSpace)) { - previewEnd = nextDot; - ellipsesAfter = false; - break; - } - if (nextSpace < 0) { - previewEnd = doc.content.length; - ellipsesAfter = false; - break; - } - previewEnd = nextSpace; - } - let preview = doc.content.substring(previewStart, start); - if (ellipsesBefore) { - preview = '... ' + preview; - } - preview += '' + doc.content.substring(start, end) + ''; - preview += doc.content.substring(end, previewEnd); - if (ellipsesAfter) { - preview += ' ...'; - } - return this.getHit(doc, null, preview); - - } - search(input) { - return new Promise((resolve, rej) => { - const results = this.getLunrResult(input); - const hits = []; - results.length > 5 && (results.length = 5); - this.titleHitsRes = [] - this.contentHitsRes = [] - results.forEach(result => { - const doc = this.searchDocs[result.ref]; - const { metadata } = result.matchData; - for (let i in metadata) { - if (metadata[i].title) { - if (!this.titleHitsRes.includes(result.ref)) { - const position = metadata[i].title.position[0] - hits.push(this.getTitleHit(doc, position, input.length)); - this.titleHitsRes.push(result.ref); - } - } else if (metadata[i].content) { - const position = metadata[i].content.position[0] - hits.push(this.getContentHit(doc, position)) - } else if (metadata[i].keywords) { - const position = metadata[i].keywords.position[0] - hits.push(this.getKeywordHit(doc, position, input.length)); - this.titleHitsRes.push(result.ref); - } - } - }); - hits.length > 5 && (hits.length = 5); - resolve(hits); - }); - } -} - -export default LunrSearchAdapter; diff --git a/website/src/theme/SearchBar/styles.css b/website/src/theme/SearchBar/styles.css deleted file mode 100644 index 8dc3b0a12b..0000000000 --- a/website/src/theme/SearchBar/styles.css +++ /dev/null @@ -1,33 +0,0 @@ -.search-icon { - background-image: var(--ifm-navbar-search-input-icon); - height: auto; - width: 24px; - cursor: pointer; - padding: 8px; - line-height: 32px; - background-repeat: no-repeat; - background-position: center; - display: none; -} - -.search-icon-hidden { - visibility: hidden; -} - -@media (max-width: 360px) { - .search-bar { - width: 0 !important; - background: none !important; - padding: 0 !important; - transition: none !important; - } - - .search-bar-expanded { - width: 9rem !important; - } - - .search-icon { - display: inline; - vertical-align: sub; - } -} diff --git a/website/src/theme/SearchBar/templates.js b/website/src/theme/SearchBar/templates.js deleted file mode 100644 index 1485e7858d..0000000000 --- 
a/website/src/theme/SearchBar/templates.js +++ /dev/null @@ -1,112 +0,0 @@ -const prefix = 'algolia-docsearch'; -const suggestionPrefix = `${prefix}-suggestion`; -const footerPrefix = `${prefix}-footer`; - -const templates = { - suggestion: ` -
-
- {{{category}}} -
-
-
- {{{subcategory}}} -
- {{#isTextOrSubcategoryNonEmpty}} -
-
{{{subcategory}}}
-
{{{title}}}
- {{#text}}
{{{text}}}
{{/text}} -
- {{/isTextOrSubcategoryNonEmpty}} -
-
- `, - suggestionSimple: ` -
-
- {{^isLvl0}} - {{{category}}} - {{^isLvl1}} - {{^isLvl1EmptyOrDuplicate}} - - {{{subcategory}}} - - {{/isLvl1EmptyOrDuplicate}} - {{/isLvl1}} - {{/isLvl0}} -
- {{#isLvl2}} - {{{title}}} - {{/isLvl2}} - {{#isLvl1}} - {{{subcategory}}} - {{/isLvl1}} - {{#isLvl0}} - {{{category}}} - {{/isLvl0}} -
-
-
- {{#text}} -
-
{{{text}}}
-
- {{/text}} -
-
- `, - footer: ` -
-
- `, - empty: ` -
-
-
-
-
- No results found for query "{{query}}" -
-
-
-
-
- `, - searchBox: ` - - - - `, -}; - -export default templates; diff --git a/website/src/theme/SearchBar/utils.js b/website/src/theme/SearchBar/utils.js deleted file mode 100644 index 0807500d76..0000000000 --- a/website/src/theme/SearchBar/utils.js +++ /dev/null @@ -1,270 +0,0 @@ -import $ from "autocomplete.js/zepto"; - -const utils = { - /* - * Move the content of an object key one level higher. - * eg. - * { - * name: 'My name', - * hierarchy: { - * lvl0: 'Foo', - * lvl1: 'Bar' - * } - * } - * Will be converted to - * { - * name: 'My name', - * lvl0: 'Foo', - * lvl1: 'Bar' - * } - * @param {Object} object Main object - * @param {String} property Main object key to move up - * @return {Object} - * @throws Error when key is not an attribute of Object or is not an object itself - */ - mergeKeyWithParent(object, property) { - if (object[property] === undefined) { - return object; - } - if (typeof object[property] !== 'object') { - return object; - } - const newObject = $.extend({}, object, object[property]); - delete newObject[property]; - return newObject; - }, - /* - * Group all objects of a collection by the value of the specified attribute - * If the attribute is a string, use the lowercase form. - * - * eg. - * groupBy([ - * {name: 'Tim', category: 'dev'}, - * {name: 'Vincent', category: 'dev'}, - * {name: 'Ben', category: 'sales'}, - * {name: 'Jeremy', category: 'sales'}, - * {name: 'AlexS', category: 'dev'}, - * {name: 'AlexK', category: 'sales'} - * ], 'category'); - * => - * { - * 'devs': [ - * {name: 'Tim', category: 'dev'}, - * {name: 'Vincent', category: 'dev'}, - * {name: 'AlexS', category: 'dev'} - * ], - * 'sales': [ - * {name: 'Ben', category: 'sales'}, - * {name: 'Jeremy', category: 'sales'}, - * {name: 'AlexK', category: 'sales'} - * ] - * } - * @param {array} collection Array of objects to group - * @param {String} property The attribute on which apply the grouping - * @return {array} - * @throws Error when one of the element does not have the specified property - */ - groupBy(collection, property) { - const newCollection = {}; - $.each(collection, (index, item) => { - if (item[property] === undefined) { - throw new Error(`[groupBy]: Object has no key ${property}`); - } - let key = item[property]; - if (typeof key === 'string') { - key = key.toLowerCase(); - } - // fix #171 the given data type of docsearch hits might be conflict with the properties of the native Object, - // such as the constructor, so we need to do this check. - if (!Object.prototype.hasOwnProperty.call(newCollection, key)) { - newCollection[key] = []; - } - newCollection[key].push(item); - }); - return newCollection; - }, - /* - * Return an array of all the values of the specified object - * eg. - * values({ - * foo: 42, - * bar: true, - * baz: 'yep' - * }) - * => - * [42, true, yep] - * @param {object} object Object to extract values from - * @return {array} - */ - values(object) { - return Object.keys(object).map(key => object[key]); - }, - /* - * Flattens an array - * eg. - * flatten([1, 2, [3, 4], [5, 6]]) - * => - * [1, 2, 3, 4, 5, 6] - * @param {array} array Array to flatten - * @return {array} - */ - flatten(array) { - const results = []; - array.forEach(value => { - if (!Array.isArray(value)) { - results.push(value); - return; - } - value.forEach(subvalue => { - results.push(subvalue); - }); - }); - return results; - }, - /* - * Flatten all values of an object into an array, marking each first element of - * each group with a specific flag - * eg. 
- * flattenAndFlagFirst({ - * 'devs': [ - * {name: 'Tim', category: 'dev'}, - * {name: 'Vincent', category: 'dev'}, - * {name: 'AlexS', category: 'dev'} - * ], - * 'sales': [ - * {name: 'Ben', category: 'sales'}, - * {name: 'Jeremy', category: 'sales'}, - * {name: 'AlexK', category: 'sales'} - * ] - * , 'isTop'); - * => - * [ - * {name: 'Tim', category: 'dev', isTop: true}, - * {name: 'Vincent', category: 'dev', isTop: false}, - * {name: 'AlexS', category: 'dev', isTop: false}, - * {name: 'Ben', category: 'sales', isTop: true}, - * {name: 'Jeremy', category: 'sales', isTop: false}, - * {name: 'AlexK', category: 'sales', isTop: false} - * ] - * @param {object} object Object to flatten - * @param {string} flag Flag to set to true on first element of each group - * @return {array} - */ - flattenAndFlagFirst(object, flag) { - const values = this.values(object).map(collection => - collection.map((item, index) => { - // eslint-disable-next-line no-param-reassign - item[flag] = index === 0; - return item; - }) - ); - return this.flatten(values); - }, - /* - * Removes all empty strings, null, false and undefined elements array - * eg. - * compact([42, false, null, undefined, '', [], 'foo']); - * => - * [42, [], 'foo'] - * @param {array} array Array to compact - * @return {array} - */ - compact(array) { - const results = []; - array.forEach(value => { - if (!value) { - return; - } - results.push(value); - }); - return results; - }, - /* - * Returns the highlighted value of the specified key in the specified object. - * If no highlighted value is available, will return the key value directly - * eg. - * getHighlightedValue({ - * _highlightResult: { - * text: { - * value: 'foo' - * } - * }, - * text: 'foo' - * }, 'text'); - * => - * 'foo' - * @param {object} object Hit object returned by the Algolia API - * @param {string} property Object key to look for - * @return {string} - **/ - getHighlightedValue(object, property) { - if ( - object._highlightResult && - object._highlightResult.hierarchy_camel && - object._highlightResult.hierarchy_camel[property] && - object._highlightResult.hierarchy_camel[property].matchLevel && - object._highlightResult.hierarchy_camel[property].matchLevel !== 'none' && - object._highlightResult.hierarchy_camel[property].value - ) { - return object._highlightResult.hierarchy_camel[property].value; - } - if ( - object._highlightResult && - object._highlightResult && - object._highlightResult[property] && - object._highlightResult[property].value - ) { - return object._highlightResult[property].value; - } - return object[property]; - }, - /* - * Returns the snippeted value of the specified key in the specified object. - * If no highlighted value is available, will return the key value directly. - * Will add starting and ending ellipsis (…) if we detect that a sentence is - * incomplete - * eg. 
- * getSnippetedValue({ - * _snippetResult: { - * text: { - * value: 'This is an unfinished sentence' - * } - * }, - * text: 'This is an unfinished sentence' - * }, 'text'); - * => - * 'This is an unfinished sentence…' - * @param {object} object Hit object returned by the Algolia API - * @param {string} property Object key to look for - * @return {string} - **/ - getSnippetedValue(object, property) { - if ( - !object._snippetResult || - !object._snippetResult[property] || - !object._snippetResult[property].value - ) { - return object[property]; - } - let snippet = object._snippetResult[property].value; - - if (snippet[0] !== snippet[0].toUpperCase()) { - snippet = `…${snippet}`; - } - if (['.', '!', '?'].indexOf(snippet[snippet.length - 1]) === -1) { - snippet = `${snippet}…`; - } - return snippet; - }, - /* - * Deep clone an object. - * Note: This will not clone functions and dates - * @param {object} object Object to clone - * @return {object} - */ - deepClone(object) { - return JSON.parse(JSON.stringify(object)); - }, -}; - -export default utils; diff --git a/website/yarn.lock b/website/yarn.lock index dbd907baae..12b3fa3fe4 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -2,133 +2,279 @@ # yarn lockfile v1 -"@algolia/autocomplete-core@1.7.1": - version "1.7.1" - resolved "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.7.1.tgz" - integrity sha512-eiZw+fxMzNQn01S8dA/hcCpoWCOCwcIIEUtHHdzN5TGB3IpzLbuhqFeTfh2OUhhgkE8Uo17+wH+QJ/wYyQmmzg== - dependencies: - "@algolia/autocomplete-shared" "1.7.1" - -"@algolia/autocomplete-preset-algolia@1.7.1": - version "1.7.1" - resolved "https://registry.npmjs.org/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.7.1.tgz" - integrity sha512-pJwmIxeJCymU1M6cGujnaIYcY3QPOVYZOXhFkWVM7IxKzy272BwCvMFMyc5NpG/QmiObBxjo7myd060OeTNJXg== - dependencies: - "@algolia/autocomplete-shared" "1.7.1" - -"@algolia/autocomplete-shared@1.7.1": - version "1.7.1" - resolved "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.7.1.tgz" - integrity sha512-eTmGVqY3GeyBTT8IWiB2K5EuURAqhnumfktAEoHxfDY2o7vg2rSnO16ZtIG0fMgt3py28Vwgq42/bVEuaQV7pg== - -"@algolia/cache-browser-local-storage@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.13.1.tgz" - integrity sha512-UAUVG2PEfwd/FfudsZtYnidJ9eSCpS+LW9cQiesePQLz41NAcddKxBak6eP2GErqyFagSlnVXe/w2E9h2m2ttg== - dependencies: - "@algolia/cache-common" "4.13.1" - -"@algolia/cache-common@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.13.1.tgz" - integrity sha512-7Vaf6IM4L0Jkl3sYXbwK+2beQOgVJ0mKFbz/4qSxKd1iy2Sp77uTAazcX+Dlexekg1fqGUOSO7HS4Sx47ZJmjA== - -"@algolia/cache-in-memory@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.13.1.tgz" - integrity sha512-pZzybCDGApfA/nutsFK1P0Sbsq6fYJU3DwIvyKg4pURerlJM4qZbB9bfLRef0FkzfQu7W11E4cVLCIOWmyZeuQ== - dependencies: - "@algolia/cache-common" "4.13.1" - -"@algolia/client-account@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.13.1.tgz" - integrity sha512-TFLiZ1KqMiir3FNHU+h3b0MArmyaHG+eT8Iojio6TdpeFcAQ1Aiy+2gb3SZk3+pgRJa/BxGmDkRUwE5E/lv3QQ== +"@algolia/autocomplete-core@1.9.3": + version "1.9.3" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-core/-/autocomplete-core-1.9.3.tgz#1d56482a768c33aae0868c8533049e02e8961be7" + integrity 
sha512-009HdfugtGCdC4JdXUbVJClA0q0zh24yyePn+KUGk3rP7j8FEe/m5Yo/z65gn6nP/cM39PxpzqKrL7A6fP6PPw== dependencies: - "@algolia/client-common" "4.13.1" - "@algolia/client-search" "4.13.1" - "@algolia/transporter" "4.13.1" + "@algolia/autocomplete-plugin-algolia-insights" "1.9.3" + "@algolia/autocomplete-shared" "1.9.3" -"@algolia/client-analytics@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.13.1.tgz" - integrity sha512-iOS1JBqh7xaL5x00M5zyluZ9+9Uy9GqtYHv/2SMuzNW1qP7/0doz1lbcsP3S7KBbZANJTFHUOfuqyRLPk91iFA== +"@algolia/autocomplete-plugin-algolia-insights@1.9.3": + version "1.9.3" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-plugin-algolia-insights/-/autocomplete-plugin-algolia-insights-1.9.3.tgz#9b7f8641052c8ead6d66c1623d444cbe19dde587" + integrity sha512-a/yTUkcO/Vyy+JffmAnTWbr4/90cLzw+CC3bRbhnULr/EM0fGNvM13oQQ14f2moLMcVDyAx/leczLlAOovhSZg== dependencies: - "@algolia/client-common" "4.13.1" - "@algolia/client-search" "4.13.1" - "@algolia/requester-common" "4.13.1" - "@algolia/transporter" "4.13.1" + "@algolia/autocomplete-shared" "1.9.3" -"@algolia/client-common@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.13.1.tgz" - integrity sha512-LcDoUE0Zz3YwfXJL6lJ2OMY2soClbjrrAKB6auYVMNJcoKZZ2cbhQoFR24AYoxnGUYBER/8B+9sTBj5bj/Gqbg== +"@algolia/autocomplete-preset-algolia@1.17.6": + version "1.17.6" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.17.6.tgz#465b652bff5c262aad4da2488d78629cfa906be6" + integrity sha512-Cvg5JENdSCMuClwhJ1ON1/jSuojaYMiUW2KePm18IkdCzPJj/NXojaOxw58RFtQFpJgfVW8h2E8mEoDtLlMdeA== dependencies: - "@algolia/requester-common" "4.13.1" - "@algolia/transporter" "4.13.1" + "@algolia/autocomplete-shared" "1.17.6" -"@algolia/client-personalization@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.13.1.tgz" - integrity sha512-1CqrOW1ypVrB4Lssh02hP//YxluoIYXAQCpg03L+/RiXJlCs+uIqlzC0ctpQPmxSlTK6h07kr50JQoYH/TIM9w== - dependencies: - "@algolia/client-common" "4.13.1" - "@algolia/requester-common" "4.13.1" - "@algolia/transporter" "4.13.1" +"@algolia/autocomplete-shared@1.17.6": + version "1.17.6" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-shared/-/autocomplete-shared-1.17.6.tgz#ad951632b6d477d4ba9a68a347e1702d26009d58" + integrity sha512-aq/3V9E00Tw2GC/PqgyPGXtqJUlVc17v4cn1EUhSc+O/4zd04Uwb3UmPm8KDaYQQOrkt1lwvCj2vG2wRE5IKhw== -"@algolia/client-search@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.13.1.tgz" - integrity sha512-YQKYA83MNRz3FgTNM+4eRYbSmHi0WWpo019s5SeYcL3HUan/i5R09VO9dk3evELDFJYciiydSjbsmhBzbpPP2A== - dependencies: - "@algolia/client-common" "4.13.1" - "@algolia/requester-common" "4.13.1" - "@algolia/transporter" "4.13.1" +"@algolia/autocomplete-shared@1.9.3": + version "1.9.3" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-shared/-/autocomplete-shared-1.9.3.tgz#2e22e830d36f0a9cf2c0ccd3c7f6d59435b77dfa" + integrity sha512-Wnm9E4Ye6Rl6sTTqjoymD+l8DjSTHsHboVRYrKgEt8Q7UHm9nYbqhN/i0fhUYA3OAEH7WA8x3jfpnmJm3rKvaQ== + +"@algolia/cache-browser-local-storage@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.24.0.tgz#97bc6d067a9fd932b9c922faa6b7fd6e546e1348" + integrity 
sha512-t63W9BnoXVrGy9iYHBgObNXqYXM3tYXCjDSHeNwnsc324r4o5UiVKUiAB4THQ5z9U5hTj6qUvwg/Ez43ZD85ww== + dependencies: + "@algolia/cache-common" "4.24.0" + +"@algolia/cache-common@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/cache-common/-/cache-common-4.24.0.tgz#81a8d3a82ceb75302abb9b150a52eba9960c9744" + integrity sha512-emi+v+DmVLpMGhp0V9q9h5CdkURsNmFC+cOS6uK9ndeJm9J4TiqSvPYVu+THUP8P/S08rxf5x2P+p3CfID0Y4g== + +"@algolia/cache-in-memory@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/cache-in-memory/-/cache-in-memory-4.24.0.tgz#ffcf8872f3a10cb85c4f4641bdffd307933a6e44" + integrity sha512-gDrt2so19jW26jY3/MkFg5mEypFIPbPoXsQGQWAi6TrCPsNOSEYepBMPlucqWigsmEy/prp5ug2jy/N3PVG/8w== + dependencies: + "@algolia/cache-common" "4.24.0" + +"@algolia/client-abtesting@5.12.0": + version "5.12.0" + resolved "https://registry.yarnpkg.com/@algolia/client-abtesting/-/client-abtesting-5.12.0.tgz#45175422ee85d505ff6a16d1634a739478a6ad0b" + integrity sha512-hx4eVydkm3yrFCFxmcBtSzI/ykt0cZ6sDWch+v3JTgKpD2WtosMJU3Upv1AjQ4B6COSHCOWEX3vfFxW6OoH6aA== + dependencies: + "@algolia/client-common" "5.12.0" + "@algolia/requester-browser-xhr" "5.12.0" + "@algolia/requester-fetch" "5.12.0" + "@algolia/requester-node-http" "5.12.0" + +"@algolia/client-account@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/client-account/-/client-account-4.24.0.tgz#eba7a921d828e7c8c40a32d4add21206c7fe12f1" + integrity sha512-adcvyJ3KjPZFDybxlqnf+5KgxJtBjwTPTeyG2aOyoJvx0Y8dUQAEOEVOJ/GBxX0WWNbmaSrhDURMhc+QeevDsA== + dependencies: + "@algolia/client-common" "4.24.0" + "@algolia/client-search" "4.24.0" + "@algolia/transporter" "4.24.0" + +"@algolia/client-analytics@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/client-analytics/-/client-analytics-4.24.0.tgz#9d2576c46a9093a14e668833c505ea697a1a3e30" + integrity sha512-y8jOZt1OjwWU4N2qr8G4AxXAzaa8DBvyHTWlHzX/7Me1LX8OayfgHexqrsL4vSBcoMmVw2XnVW9MhL+Y2ZDJXg== + dependencies: + "@algolia/client-common" "4.24.0" + "@algolia/client-search" "4.24.0" + "@algolia/requester-common" "4.24.0" + "@algolia/transporter" "4.24.0" + +"@algolia/client-analytics@5.12.0": + version "5.12.0" + resolved "https://registry.yarnpkg.com/@algolia/client-analytics/-/client-analytics-5.12.0.tgz#e387f4de01f4fb549b7506762003bef335be2927" + integrity sha512-EpTsSv6IW8maCfXCDIptgT7+mQJj7pImEkcNUnxR8yUKAHzTogTXv9yGm2WXOZFVuwstd2i0sImhQ1Vz8RH/hA== + dependencies: + "@algolia/client-common" "5.12.0" + "@algolia/requester-browser-xhr" "5.12.0" + "@algolia/requester-fetch" "5.12.0" + "@algolia/requester-node-http" "5.12.0" + +"@algolia/client-common@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/client-common/-/client-common-4.24.0.tgz#77c46eee42b9444a1d1c1583a83f7df4398a649d" + integrity sha512-bc2ROsNL6w6rqpl5jj/UywlIYC21TwSSoFHKl01lYirGMW+9Eek6r02Tocg4gZ8HAw3iBvu6XQiM3BEbmEMoiA== + dependencies: + "@algolia/requester-common" "4.24.0" + "@algolia/transporter" "4.24.0" + +"@algolia/client-common@5.12.0": + version "5.12.0" + resolved "https://registry.yarnpkg.com/@algolia/client-common/-/client-common-5.12.0.tgz#e33b6fefb333beb56eb58ab7424fcd7ec11ac7d0" + integrity sha512-od3WmO8qxyfNhKc+K3D17tvun3IMs/xMNmxCG9MiElAkYVbPPTRUYMkRneCpmJyQI0hNx2/EA4kZgzVfQjO86Q== + +"@algolia/client-insights@5.12.0": + version "5.12.0" + resolved "https://registry.yarnpkg.com/@algolia/client-insights/-/client-insights-5.12.0.tgz#bb80c4227178b452dd93a649b9991b8140cba52d" + integrity 
sha512-8alajmsYUd+7vfX5lpRNdxqv3Xx9clIHLUItyQK0Z6gwGMbVEFe6YYhgDtwslMAP0y6b0WeJEIZJMLgT7VYpRw== + dependencies: + "@algolia/client-common" "5.12.0" + "@algolia/requester-browser-xhr" "5.12.0" + "@algolia/requester-fetch" "5.12.0" + "@algolia/requester-node-http" "5.12.0" + +"@algolia/client-personalization@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/client-personalization/-/client-personalization-4.24.0.tgz#8b47789fb1cb0f8efbea0f79295b7c5a3850f6ae" + integrity sha512-l5FRFm/yngztweU0HdUzz1rC4yoWCFo3IF+dVIVTfEPg906eZg5BOd1k0K6rZx5JzyyoP4LdmOikfkfGsKVE9w== + dependencies: + "@algolia/client-common" "4.24.0" + "@algolia/requester-common" "4.24.0" + "@algolia/transporter" "4.24.0" + +"@algolia/client-personalization@5.12.0": + version "5.12.0" + resolved "https://registry.yarnpkg.com/@algolia/client-personalization/-/client-personalization-5.12.0.tgz#ad711245403754686efff6a65d6c83877e64ecfa" + integrity sha512-bUV9HtfkTBgpoVhxFrMkmVPG03ZN1Rtn51kiaEtukucdk3ggjR9Qu1YUfRSU2lFgxr9qJc8lTxwfvhjCeJRcqw== + dependencies: + "@algolia/client-common" "5.12.0" + "@algolia/requester-browser-xhr" "5.12.0" + "@algolia/requester-fetch" "5.12.0" + "@algolia/requester-node-http" "5.12.0" + +"@algolia/client-query-suggestions@5.12.0": + version "5.12.0" + resolved "https://registry.yarnpkg.com/@algolia/client-query-suggestions/-/client-query-suggestions-5.12.0.tgz#fc3bbf6d86e8989bb8487dc69ec49743fa75ceb4" + integrity sha512-Q5CszzGWfxbIDs9DJ/QJsL7bP6h+lJMg27KxieEnI9KGCu0Jt5iFA3GkREkgRZxRdzlHbZKkrIzhtHVbSHw/rg== + dependencies: + "@algolia/client-common" "5.12.0" + "@algolia/requester-browser-xhr" "5.12.0" + "@algolia/requester-fetch" "5.12.0" + "@algolia/requester-node-http" "5.12.0" + +"@algolia/client-search@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/client-search/-/client-search-4.24.0.tgz#75e6c02d33ef3e0f34afd9962c085b856fc4a55f" + integrity sha512-uRW6EpNapmLAD0mW47OXqTP8eiIx5F6qN9/x/7HHO6owL3N1IXqydGwW5nhDFBrV+ldouro2W1VX3XlcUXEFCA== + dependencies: + "@algolia/client-common" "4.24.0" + "@algolia/requester-common" "4.24.0" + "@algolia/transporter" "4.24.0" + +"@algolia/client-search@5.12.0": + version "5.12.0" + resolved "https://registry.yarnpkg.com/@algolia/client-search/-/client-search-5.12.0.tgz#cd3eb4854664177d6e992bb2b942e2a12e4cb919" + integrity sha512-R3qzEytgVLHOGNri+bpta6NtTt7YtkvUe/QBcAmMDjW4Jk1P0eBYIPfvnzIPbINRsLxIq9fZs9uAYBgsrts4Zg== + dependencies: + "@algolia/client-common" "5.12.0" + "@algolia/requester-browser-xhr" "5.12.0" + "@algolia/requester-fetch" "5.12.0" + "@algolia/requester-node-http" "5.12.0" "@algolia/events@^4.0.1": version "4.0.1" resolved "https://registry.npmjs.org/@algolia/events/-/events-4.0.1.tgz" integrity sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ== -"@algolia/logger-common@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.13.1.tgz" - integrity sha512-L6slbL/OyZaAXNtS/1A8SAbOJeEXD5JcZeDCPYDqSTYScfHu+2ePRTDMgUTY4gQ7HsYZ39N1LujOd8WBTmM2Aw== - -"@algolia/logger-console@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.13.1.tgz" - integrity sha512-7jQOTftfeeLlnb3YqF8bNgA2GZht7rdKkJ31OCeSH2/61haO0tWPoNRjZq9XLlgMQZH276pPo0NdiArcYPHjCA== - dependencies: - "@algolia/logger-common" "4.13.1" - -"@algolia/requester-browser-xhr@4.13.1": - version "4.13.1" - resolved 
"https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.13.1.tgz" - integrity sha512-oa0CKr1iH6Nc7CmU6RE7TnXMjHnlyp7S80pP/LvZVABeJHX3p/BcSCKovNYWWltgTxUg0U1o+2uuy8BpMKljwA== - dependencies: - "@algolia/requester-common" "4.13.1" - -"@algolia/requester-common@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.13.1.tgz" - integrity sha512-eGVf0ID84apfFEuXsaoSgIxbU3oFsIbz4XiotU3VS8qGCJAaLVUC5BUJEkiFENZIhon7hIB4d0RI13HY4RSA+w== - -"@algolia/requester-node-http@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.13.1.tgz" - integrity sha512-7C0skwtLdCz5heKTVe/vjvrqgL/eJxmiEjHqXdtypcE5GCQCYI15cb+wC4ytYioZDMiuDGeVYmCYImPoEgUGPw== - dependencies: - "@algolia/requester-common" "4.13.1" - -"@algolia/transporter@4.13.1": - version "4.13.1" - resolved "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.13.1.tgz" - integrity sha512-pICnNQN7TtrcYJqqPEXByV8rJ8ZRU2hCiIKLTLRyNpghtQG3VAFk6fVtdzlNfdUGZcehSKGarPIZEHlQXnKjgw== - dependencies: - "@algolia/cache-common" "4.13.1" - "@algolia/logger-common" "4.13.1" - "@algolia/requester-common" "4.13.1" +"@algolia/ingestion@1.12.0": + version "1.12.0" + resolved "https://registry.yarnpkg.com/@algolia/ingestion/-/ingestion-1.12.0.tgz#01a297fb2a58019595e5d74e95939da033a18194" + integrity sha512-zpHo6qhR22tL8FsdSI4DvEraPDi/019HmMrCFB/TUX98yzh5ooAU7sNW0qPL1I7+S++VbBmNzJOEU9VI8tEC8A== + dependencies: + "@algolia/client-common" "5.12.0" + "@algolia/requester-browser-xhr" "5.12.0" + "@algolia/requester-fetch" "5.12.0" + "@algolia/requester-node-http" "5.12.0" + +"@algolia/logger-common@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/logger-common/-/logger-common-4.24.0.tgz#28d439976019ec0a46ba7a1a739ef493d4ef8123" + integrity sha512-LLUNjkahj9KtKYrQhFKCzMx0BY3RnNP4FEtO+sBybCjJ73E8jNdaKJ/Dd8A/VA4imVHP5tADZ8pn5B8Ga/wTMA== + +"@algolia/logger-console@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/logger-console/-/logger-console-4.24.0.tgz#c6ff486036cd90b81d07a95aaba04461da7e1c65" + integrity sha512-X4C8IoHgHfiUROfoRCV+lzSy+LHMgkoEEU1BbKcsfnV0i0S20zyy0NLww9dwVHUWNfPPxdMU+/wKmLGYf96yTg== + dependencies: + "@algolia/logger-common" "4.24.0" + +"@algolia/monitoring@1.12.0": + version "1.12.0" + resolved "https://registry.yarnpkg.com/@algolia/monitoring/-/monitoring-1.12.0.tgz#f510bfd9d09352b31ccce293d1fd84cdea59354c" + integrity sha512-i2AJZED/zf4uhxezAJUhMKoL5QoepCBp2ynOYol0N76+TSoohaMADdPnWCqOULF4RzOwrG8wWynAwBlXsAI1RQ== + dependencies: + "@algolia/client-common" "5.12.0" + "@algolia/requester-browser-xhr" "5.12.0" + "@algolia/requester-fetch" "5.12.0" + "@algolia/requester-node-http" "5.12.0" + +"@algolia/recommend@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/recommend/-/recommend-4.24.0.tgz#8a3f78aea471ee0a4836b78fd2aad4e9abcaaf34" + integrity sha512-P9kcgerfVBpfYHDfVZDvvdJv0lEoCvzNlOy2nykyt5bK8TyieYyiD0lguIJdRZZYGre03WIAFf14pgE+V+IBlw== + dependencies: + "@algolia/cache-browser-local-storage" "4.24.0" + "@algolia/cache-common" "4.24.0" + "@algolia/cache-in-memory" "4.24.0" + "@algolia/client-common" "4.24.0" + "@algolia/client-search" "4.24.0" + "@algolia/logger-common" "4.24.0" + "@algolia/logger-console" "4.24.0" + "@algolia/requester-browser-xhr" "4.24.0" + "@algolia/requester-common" "4.24.0" + "@algolia/requester-node-http" "4.24.0" + "@algolia/transporter" "4.24.0" + +"@algolia/recommend@5.12.0": + 
version "5.12.0" + resolved "https://registry.yarnpkg.com/@algolia/recommend/-/recommend-5.12.0.tgz#bc9f69c78c08ba9a3579e7fe2a0f4037b494cc55" + integrity sha512-0jmZyKvYnB/Bj5c7WKsKedOUjnr0UtXm0LVFUdQrxXfqOqvWv9n6Vpr65UjdYG4Q49kRQxhlwtal9WJYrYymXg== + dependencies: + "@algolia/client-common" "5.12.0" + "@algolia/requester-browser-xhr" "5.12.0" + "@algolia/requester-fetch" "5.12.0" + "@algolia/requester-node-http" "5.12.0" + +"@algolia/requester-browser-xhr@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.24.0.tgz#313c5edab4ed73a052e75803855833b62dd19c16" + integrity sha512-Z2NxZMb6+nVXSjF13YpjYTdvV3032YTBSGm2vnYvYPA6mMxzM3v5rsCiSspndn9rzIW4Qp1lPHBvuoKJV6jnAA== + dependencies: + "@algolia/requester-common" "4.24.0" + +"@algolia/requester-browser-xhr@5.12.0": + version "5.12.0" + resolved "https://registry.yarnpkg.com/@algolia/requester-browser-xhr/-/requester-browser-xhr-5.12.0.tgz#dba0072d5098a145e4724a723ea1c765b4af0cb6" + integrity sha512-KxwleraFuVoEGCoeW6Y1RAEbgBMS7SavqeyzWdtkJc6mXeCOJXn1iZitb8Tyn2FcpMNUKlSm0adrUTt7G47+Ow== + dependencies: + "@algolia/client-common" "5.12.0" + +"@algolia/requester-common@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/requester-common/-/requester-common-4.24.0.tgz#1c60c198031f48fcdb9e34c4057a3ea987b9a436" + integrity sha512-k3CXJ2OVnvgE3HMwcojpvY6d9kgKMPRxs/kVohrwF5WMr2fnqojnycZkxPoEg+bXm8fi5BBfFmOqgYztRtHsQA== + +"@algolia/requester-fetch@5.12.0": + version "5.12.0" + resolved "https://registry.yarnpkg.com/@algolia/requester-fetch/-/requester-fetch-5.12.0.tgz#4db2772b9b0699fdfadbcd7b87e0608a4acf8363" + integrity sha512-FuDZXUGU1pAg2HCnrt8+q1VGHKChV/LhvjvZlLOT7e56GJie6p+EuLu4/hMKPOVuQQ8XXtrTHKIU3Lw+7O5/bQ== + dependencies: + "@algolia/client-common" "5.12.0" + +"@algolia/requester-node-http@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/requester-node-http/-/requester-node-http-4.24.0.tgz#4461593714031d02aa7da221c49df675212f482f" + integrity sha512-JF18yTjNOVYvU/L3UosRcvbPMGT9B+/GQWNWnenIImglzNVGpyzChkXLnrSf6uxwVNO6ESGu6oN8MqcGQcjQJw== + dependencies: + "@algolia/requester-common" "4.24.0" + +"@algolia/requester-node-http@5.12.0": + version "5.12.0" + resolved "https://registry.yarnpkg.com/@algolia/requester-node-http/-/requester-node-http-5.12.0.tgz#6c6bb47df33351b819790f26346632196c97a3c7" + integrity sha512-ncDDY7CxZhMs6LIoPl+vHFQceIBhYPY5EfuGF1V7beO0U38xfsCYEyutEFB2kRzf4D9Gqppn3iWX71sNtrKcuw== + dependencies: + "@algolia/client-common" "5.12.0" + +"@algolia/transporter@4.24.0": + version "4.24.0" + resolved "https://registry.yarnpkg.com/@algolia/transporter/-/transporter-4.24.0.tgz#226bb1f8af62430374c1972b2e5c8580ab275102" + integrity sha512-86nI7w6NzWxd1Zp9q3413dRshDqAzSbsQjhcDhPIatEFiZrL1/TjnHL8S7jVKFePlIMzDsZWXAXwXzcok9c5oA== + dependencies: + "@algolia/cache-common" "4.24.0" + "@algolia/logger-common" "4.24.0" + "@algolia/requester-common" "4.24.0" "@ampproject/remapping@^2.2.0": version "2.2.0" @@ -166,88 +312,58 @@ resolved "https://registry.yarnpkg.com/@argos-ci/playwright/-/playwright-0.0.7.tgz#af8ba506415a9685e213bbc20379f4eb31615d95" integrity sha512-E+v943CBVrw0HhX3yI36fUqHbyBoWDpxSM0wsU4D5NMijnlL5sA9LVqHz6pfTJ8JbFaA4zBtbiVMHK9xCuPObg== -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4", "@babel/code-frame@^7.16.0", "@babel/code-frame@^7.18.6", "@babel/code-frame@^7.21.4", "@babel/code-frame@^7.8.3": +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.25.9", "@babel/code-frame@^7.26.0": + version 
"7.26.2" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.26.2.tgz#4b5fab97d33338eff916235055f0ebc21e573a85" + integrity sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ== + dependencies: + "@babel/helper-validator-identifier" "^7.25.9" + js-tokens "^4.0.0" + picocolors "^1.0.0" + +"@babel/code-frame@^7.16.0", "@babel/code-frame@^7.8.3": version "7.21.4" resolved "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.21.4.tgz" integrity sha512-LYvhNKfwWSPpocw8GI7gpK2nq3HSDuEPC/uSYaALSJu9xjsalaaYFOq0Pwt5KmVqwEbZlDu81aLXwBOmD/Fv9g== dependencies: "@babel/highlight" "^7.18.6" -"@babel/code-frame@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.24.7.tgz#882fd9e09e8ee324e496bd040401c6f046ef4465" - integrity sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA== - dependencies: - "@babel/highlight" "^7.24.7" - picocolors "^1.0.0" - -"@babel/compat-data@^7.13.11", "@babel/compat-data@^7.18.6", "@babel/compat-data@^7.21.4": - version "7.21.4" - resolved "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.21.4.tgz" - integrity sha512-/DYyDpeCfaVinT40FPGdkkb+lYSKvsVuMjDAG7jPOWWiM1ibOaB9CXJAlc4d1QpP/U2q2P9jbrSlClKSErd55g== - -"@babel/core@7.12.9": - version "7.12.9" - resolved "https://registry.npmjs.org/@babel/core/-/core-7.12.9.tgz" - integrity sha512-gTXYh3M5wb7FRXQy+FErKFAv90BnlOuNn1QkCK2lREoPAjrQCO49+HVSrFoe5uakFAF5eenS75KbO2vQiLrTMQ== - dependencies: - "@babel/code-frame" "^7.10.4" - "@babel/generator" "^7.12.5" - "@babel/helper-module-transforms" "^7.12.1" - "@babel/helpers" "^7.12.5" - "@babel/parser" "^7.12.7" - "@babel/template" "^7.12.7" - "@babel/traverse" "^7.12.9" - "@babel/types" "^7.12.7" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.1" - json5 "^2.1.2" - lodash "^4.17.19" - resolve "^1.3.2" - semver "^5.4.1" - source-map "^0.5.0" +"@babel/compat-data@^7.22.6", "@babel/compat-data@^7.25.9", "@babel/compat-data@^7.26.0": + version "7.26.2" + resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.26.2.tgz#278b6b13664557de95b8f35b90d96785850bb56e" + integrity sha512-Z0WgzSEa+aUcdiJuCIqgujCshpMWgUpgOxXotrYPSA53hA3qopNaqcJpyr0hVb1FeWdnqFA35/fUtXgBK8srQg== -"@babel/core@^7.15.5", "@babel/core@^7.18.6": - version "7.21.4" - resolved "https://registry.npmjs.org/@babel/core/-/core-7.21.4.tgz" - integrity sha512-qt/YV149Jman/6AfmlxJ04LMIu8bMoyl3RB91yTFrxQmgbrSvQMy7cI8Q62FHx1t8wJ8B5fu0UDoLwHAhUo1QA== +"@babel/core@^7.21.3", "@babel/core@^7.25.9": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.26.0.tgz#d78b6023cc8f3114ccf049eb219613f74a747b40" + integrity sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg== dependencies: "@ampproject/remapping" "^2.2.0" - "@babel/code-frame" "^7.21.4" - "@babel/generator" "^7.21.4" - "@babel/helper-compilation-targets" "^7.21.4" - "@babel/helper-module-transforms" "^7.21.2" - "@babel/helpers" "^7.21.0" - "@babel/parser" "^7.21.4" - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.21.4" - "@babel/types" "^7.21.4" - convert-source-map "^1.7.0" + "@babel/code-frame" "^7.26.0" + "@babel/generator" "^7.26.0" + "@babel/helper-compilation-targets" "^7.25.9" + "@babel/helper-module-transforms" "^7.26.0" + "@babel/helpers" "^7.26.0" + "@babel/parser" "^7.26.0" + "@babel/template" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.26.0" + convert-source-map 
"^2.0.0" debug "^4.1.0" gensync "^1.0.0-beta.2" - json5 "^2.2.2" - semver "^6.3.0" + json5 "^2.2.3" + semver "^6.3.1" -"@babel/generator@^7.12.5", "@babel/generator@^7.18.7", "@babel/generator@^7.21.4": - version "7.21.4" - resolved "https://registry.npmjs.org/@babel/generator/-/generator-7.21.4.tgz" - integrity sha512-NieM3pVIYW2SwGzKoqfPrQsf4xGs9M9AIG3ThppsSRmO+m7eQhmI6amajKMUeIO37wFfsvnvcxQFx6x6iqxDnA== +"@babel/generator@^7.25.9", "@babel/generator@^7.26.0": + version "7.26.2" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.26.2.tgz#87b75813bec87916210e5e01939a4c823d6bb74f" + integrity sha512-zevQbhbau95nkoxSq3f/DC/SC+EEOUZd3DYqfSkMhY2/wfSeaHV1Ew4vk8e+x8lja31IbyuUa2uQ3JONqKbysw== dependencies: - "@babel/types" "^7.21.4" - "@jridgewell/gen-mapping" "^0.3.2" - "@jridgewell/trace-mapping" "^0.3.17" - jsesc "^2.5.1" - -"@babel/generator@^7.25.4": - version "7.25.5" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.25.5.tgz#b31cf05b3fe8c32d206b6dad03bb0aacbde73450" - integrity sha512-abd43wyLfbWoxC6ahM8xTkqLpGB2iWBVyuKC9/srhFunCd1SDNrV1s72bBpK4hLj8KLzHBBcOblvLQZBNw9r3w== - dependencies: - "@babel/types" "^7.25.4" + "@babel/parser" "^7.26.2" + "@babel/types" "^7.26.0" "@jridgewell/gen-mapping" "^0.3.5" "@jridgewell/trace-mapping" "^0.3.25" - jsesc "^2.5.1" + jsesc "^3.0.2" "@babel/helper-annotate-as-pure@^7.18.6": version "7.18.6" @@ -256,37 +372,44 @@ dependencies: "@babel/types" "^7.18.6" -"@babel/helper-builder-binary-assignment-operator-visitor@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.18.6.tgz" - integrity sha512-KT10c1oWEpmrIRYnthbzHgoOf6B+Xd6a5yhdbNtdhtG7aO1or5HViuf1TQR36xY/QprXA5nvxO6nAjhJ4y38jw== +"@babel/helper-annotate-as-pure@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.25.9.tgz#d8eac4d2dc0d7b6e11fa6e535332e0d3184f06b4" + integrity sha512-gv7320KBUFJz1RnylIg5WWYPRXKZ884AGkYpgpWW02TH66Dl+HaC1t1CKd0z3R4b6hdYEcmrNZHUmfCP+1u3/g== dependencies: - "@babel/helper-explode-assignable-expression" "^7.18.6" - "@babel/types" "^7.18.6" + "@babel/types" "^7.25.9" -"@babel/helper-compilation-targets@^7.13.0", "@babel/helper-compilation-targets@^7.18.6", "@babel/helper-compilation-targets@^7.21.4": - version "7.21.4" - resolved "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.21.4.tgz" - integrity sha512-Fa0tTuOXZ1iL8IeDFUWCzjZcn+sJGd9RZdH9esYVjEejGmzf+FFYQpMi/kZUk2kPy/q1H3/GPw7np8qar/stfg== +"@babel/helper-builder-binary-assignment-operator-visitor@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.25.9.tgz#f41752fe772a578e67286e6779a68a5a92de1ee9" + integrity sha512-C47lC7LIDCnz0h4vai/tpNOI95tCd5ZT3iBt/DBH5lXKHZsyNQv18yf1wIIg2ntiQNgmAvA+DgZ82iW8Qdym8g== + dependencies: + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" + +"@babel/helper-compilation-targets@^7.22.6", "@babel/helper-compilation-targets@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.9.tgz#55af025ce365be3cdc0c1c1e56c6af617ce88875" + integrity sha512-j9Db8Suy6yV/VHa4qzrj9yZfZxhLWQdVnRlXxmKLYlhWUVB1sB2G5sxuWYXk/whHD9iW76PmNzxZ4UCnTQTVEQ== dependencies: - "@babel/compat-data" "^7.21.4" - 
"@babel/helper-validator-option" "^7.21.0" - browserslist "^4.21.3" + "@babel/compat-data" "^7.25.9" + "@babel/helper-validator-option" "^7.25.9" + browserslist "^4.24.0" lru-cache "^5.1.1" - semver "^6.3.0" + semver "^6.3.1" -"@babel/helper-create-class-features-plugin@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.18.6.tgz" - integrity sha512-YfDzdnoxHGV8CzqHGyCbFvXg5QESPFkXlHtvdCkesLjjVMT2Adxe4FGUR5ChIb3DxSaXO12iIOCWoXdsUVwnqw== +"@babel/helper-create-class-features-plugin@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.25.9.tgz#7644147706bb90ff613297d49ed5266bde729f83" + integrity sha512-UTZQMvt0d/rSz6KI+qdu7GQze5TIajwTS++GUozlw8VBJDEOAqSXwm1WvmYEZwqdqSGQshRocPDqrt4HBZB3fQ== dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-environment-visitor" "^7.18.6" - "@babel/helper-function-name" "^7.18.6" - "@babel/helper-member-expression-to-functions" "^7.18.6" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/helper-replace-supers" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-member-expression-to-functions" "^7.25.9" + "@babel/helper-optimise-call-expression" "^7.25.9" + "@babel/helper-replace-supers" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.25.9" + "@babel/traverse" "^7.25.9" + semver "^6.3.1" "@babel/helper-create-regexp-features-plugin@^7.18.6": version "7.18.6" @@ -296,681 +419,562 @@ "@babel/helper-annotate-as-pure" "^7.18.6" regexpu-core "^5.1.0" -"@babel/helper-define-polyfill-provider@^0.3.1": - version "0.3.1" - resolved "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.3.1.tgz" - integrity sha512-J9hGMpJQmtWmj46B3kBHmL38UhJGhYX7eqkcq+2gsstyYt341HmPeWspihX43yVRA0mS+8GGk2Gckc7bY/HCmA== +"@babel/helper-create-regexp-features-plugin@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.25.9.tgz#3e8999db94728ad2b2458d7a470e7770b7764e26" + integrity sha512-ORPNZ3h6ZRkOyAa/SaHU+XsLZr0UQzRwuDQ0cczIA17nAzZ+85G5cVkOJIj7QavLZGSe8QXUmNFxSZzjcZF9bw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.25.9" + regexpu-core "^6.1.1" + semver "^6.3.1" + +"@babel/helper-define-polyfill-provider@^0.6.2": + version "0.6.2" + resolved "https://registry.yarnpkg.com/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.2.tgz#18594f789c3594acb24cfdb4a7f7b7d2e8bd912d" + integrity sha512-LV76g+C502biUK6AyZ3LK10vDpDyCzZnhZFXkH1L75zHPj68+qc8Zfpx2th+gzwA2MzyK+1g/3EPl62yFnVttQ== dependencies: - "@babel/helper-compilation-targets" "^7.13.0" - "@babel/helper-module-imports" "^7.12.13" - "@babel/helper-plugin-utils" "^7.13.0" - "@babel/traverse" "^7.13.0" + "@babel/helper-compilation-targets" "^7.22.6" + "@babel/helper-plugin-utils" "^7.22.5" debug "^4.1.1" lodash.debounce "^4.0.8" resolve "^1.14.2" - semver "^6.1.2" - -"@babel/helper-environment-visitor@^7.18.6", "@babel/helper-environment-visitor@^7.18.9": - version "7.18.9" - resolved "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.18.9.tgz" - integrity sha512-3r/aACDJ3fhQ/EVgFy0hpj8oHyHpQc+LPtJoY9SzTThAsStm4Ptegq92vqKoE3vD706ZVFWITnMnxucw+S9Ipg== 
-"@babel/helper-explode-assignable-expression@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.18.6.tgz" - integrity sha512-eyAYAsQmB80jNfg4baAtLeWAQHfHFiR483rzFK+BhETlGZaQC9bsfrugfXDCbRHLQbIA7U5NxhhOxN7p/dWIcg== +"@babel/helper-member-expression-to-functions@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.25.9.tgz#9dfffe46f727005a5ea29051ac835fb735e4c1a3" + integrity sha512-wbfdZ9w5vk0C0oyHqAJbc62+vet5prjj01jjJ8sKn3j9h3MQQlflEdXYvuqRWjHnM12coDEqiC1IRCi0U/EKwQ== dependencies: - "@babel/types" "^7.18.6" + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" -"@babel/helper-function-name@^7.18.6": - version "7.21.0" - resolved "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz" - integrity sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg== - dependencies: - "@babel/template" "^7.20.7" - "@babel/types" "^7.21.0" - -"@babel/helper-hoist-variables@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.18.6.tgz" - integrity sha512-UlJQPkFqFULIcyW5sbzgbkxn2FKRgwWiRexcuaR8RNJRy8+LLveqPjwZV/bwrLZCN0eUHD/x8D0heK1ozuoo6Q== - dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-member-expression-to-functions@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.18.6.tgz" - integrity sha512-CeHxqwwipekotzPDUuJOfIMtcIHBuc7WAzLmTYWctVigqS5RktNMQ5bEwQSuGewzYnCtTWa3BARXeiLxDTv+Ng== +"@babel/helper-module-imports@^7.16.7", "@babel/helper-module-imports@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.25.9.tgz#e7f8d20602ebdbf9ebbea0a0751fb0f2a4141715" + integrity sha512-tnUA4RsrmflIM6W6RFTLFSXITtl0wKjgpnLgXyowocVPrbYrLUXSBXDgTs8BlbmIzIdlBySRQjINYs2BAkiLtw== dependencies: - "@babel/types" "^7.18.6" + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" -"@babel/helper-module-imports@^7.12.13", "@babel/helper-module-imports@^7.16.7", "@babel/helper-module-imports@^7.18.6": +"@babel/helper-module-imports@^7.18.6": version "7.18.6" resolved "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.18.6.tgz" integrity sha512-0NFvs3VkuSYbFi1x2Vd6tKrywq+z/cLeYC/RJNFrIX/30Bf5aiGYbtvGXolEktzJH8o5E5KJ3tT+nkxuuZFVlA== dependencies: "@babel/types" "^7.18.6" -"@babel/helper-module-transforms@^7.12.1", "@babel/helper-module-transforms@^7.18.6", "@babel/helper-module-transforms@^7.21.2": - version "7.21.2" - resolved "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.21.2.tgz" - integrity sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ== +"@babel/helper-module-transforms@^7.25.9", "@babel/helper-module-transforms@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.26.0.tgz#8ce54ec9d592695e58d84cd884b7b5c6a2fdeeae" + integrity sha512-xO+xu6B5K2czEnQye6BHA7DolFFmS3LB7stHZFaOLb1pAwO1HWLS8fXA+eh0A2yIvltPVmx3eNNDBJA2SLHXFw== dependencies: - "@babel/helper-environment-visitor" "^7.18.9" - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-simple-access" "^7.20.2" - 
"@babel/helper-split-export-declaration" "^7.18.6" - "@babel/helper-validator-identifier" "^7.19.1" - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.21.2" - "@babel/types" "^7.21.2" + "@babel/helper-module-imports" "^7.25.9" + "@babel/helper-validator-identifier" "^7.25.9" + "@babel/traverse" "^7.25.9" -"@babel/helper-optimise-call-expression@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.18.6.tgz" - integrity sha512-HP59oD9/fEHQkdcbgFCnbmgH5vIQTJbxh2yf+CdM89/glUNnuzr87Q8GIjGEnOktTROemO0Pe0iPAYbqZuOUiA== +"@babel/helper-optimise-call-expression@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.25.9.tgz#3324ae50bae7e2ab3c33f60c9a877b6a0146b54e" + integrity sha512-FIpuNaz5ow8VyrYcnXQTDRGvV6tTjkNtCK/RYNDXGSLlUD6cBuQTSw43CShGxjvfBTfcUA/r6UhUCbtYqkhcuQ== dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-plugin-utils@7.10.4": - version "7.10.4" - resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz" - integrity sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg== + "@babel/types" "^7.25.9" -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.12.13", "@babel/helper-plugin-utils@^7.13.0", "@babel/helper-plugin-utils@^7.14.5", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": +"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.18.6", "@babel/helper-plugin-utils@^7.8.0": version "7.18.6" resolved "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.18.6.tgz" integrity sha512-gvZnm1YAAxh13eJdkb9EWHBnF3eAub3XTLCZEehHT2kWxiKVRL64+ae5Y6Ivne0mVHmMYKT+xWgZO+gQhuLUBg== -"@babel/helper-remap-async-to-generator@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.18.6.tgz" - integrity sha512-z5wbmV55TveUPZlCLZvxWHtrjuJd+8inFhk7DG0WW87/oJuGDcjDiu7HIvGcpf5464L6xKCg3vNkmlVVz9hwyQ== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-environment-visitor" "^7.18.6" - "@babel/helper-wrap-function" "^7.18.6" - "@babel/types" "^7.18.6" +"@babel/helper-plugin-utils@^7.22.5", "@babel/helper-plugin-utils@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.25.9.tgz#9cbdd63a9443a2c92a725cca7ebca12cc8dd9f46" + integrity sha512-kSMlyUVdWe25rEsRGviIgOWnoT/nfABVWlqt9N19/dIPWViAOW2s9wznP5tURbs/IDuNk4gPy3YdYRgH3uxhBw== -"@babel/helper-replace-supers@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.18.6.tgz" - integrity sha512-fTf7zoXnUGl9gF25fXCWE26t7Tvtyn6H4hkLSYhATwJvw2uYxd3aoXplMSe0g9XbwK7bmxNes7+FGO0rB/xC0g== +"@babel/helper-remap-async-to-generator@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.25.9.tgz#e53956ab3d5b9fb88be04b3e2f31b523afd34b92" + integrity sha512-IZtukuUeBbhgOcaW2s06OXTzVNJR0ybm4W5xC1opWFFJMZbwRj5LCk+ByYH7WdZPZTt8KnFwA8pvjN2yqcPlgw== dependencies: - "@babel/helper-environment-visitor" "^7.18.6" - "@babel/helper-member-expression-to-functions" "^7.18.6" - "@babel/helper-optimise-call-expression" "^7.18.6" - 
"@babel/traverse" "^7.18.6" - "@babel/types" "^7.18.6" + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-wrap-function" "^7.25.9" + "@babel/traverse" "^7.25.9" -"@babel/helper-simple-access@^7.18.6", "@babel/helper-simple-access@^7.20.2": - version "7.20.2" - resolved "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.20.2.tgz" - integrity sha512-+0woI/WPq59IrqDYbVGfshjT5Dmk/nnbdpcF8SnMhhXObpTq2KNBdLFRFrkVdbDOyUmHBCxzm5FHV1rACIkIbA== +"@babel/helper-replace-supers@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.25.9.tgz#ba447224798c3da3f8713fc272b145e33da6a5c5" + integrity sha512-IiDqTOTBQy0sWyeXyGSC5TBJpGFXBkRynjBeXsvbhQFKj2viwJC76Epz35YLU1fpe/Am6Vppb7W7zM4fPQzLsQ== dependencies: - "@babel/types" "^7.20.2" + "@babel/helper-member-expression-to-functions" "^7.25.9" + "@babel/helper-optimise-call-expression" "^7.25.9" + "@babel/traverse" "^7.25.9" -"@babel/helper-skip-transparent-expression-wrappers@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.18.6.tgz" - integrity sha512-4KoLhwGS9vGethZpAhYnMejWkX64wsnHPDwvOsKWU6Fg4+AlK2Jz3TyjQLMEPvz+1zemi/WBdkYxCD0bAfIkiw== +"@babel/helper-simple-access@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.25.9.tgz#6d51783299884a2c74618d6ef0f86820ec2e7739" + integrity sha512-c6WHXuiaRsJTyHYLJV75t9IqsmTbItYfdj99PnzYGQZkYKvan5/2jKJ7gu31J3/BJ/A18grImSPModuyG/Eo0Q== dependencies: - "@babel/types" "^7.18.6" + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" -"@babel/helper-split-export-declaration@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.18.6.tgz" - integrity sha512-bde1etTx6ZyTmobl9LLMMQsaizFVZrquTEHOqKeQESMKo4PlObf+8+JA25ZsIpZhT/WEd39+vOdLXAFG/nELpA== +"@babel/helper-skip-transparent-expression-wrappers@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.25.9.tgz#0b2e1b62d560d6b1954893fd2b705dc17c91f0c9" + integrity sha512-K4Du3BFa3gvyhzgPcntrkDgZzQaq6uozzcpGbOO1OEJaI+EJdqWIMTLgFgQf6lrfiDFo5FU+BxKepI9RmZqahA== dependencies: - "@babel/types" "^7.18.6" - -"@babel/helper-string-parser@^7.19.4": - version "7.19.4" - resolved "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.19.4.tgz" - integrity sha512-nHtDoQcuqFmwYNYPz3Rah5ph2p8PFeFCsZk9A/48dPc/rGocJ5J3hAAZ7pb76VWX3fZKu+uEr/FhH5jLx7umrw== + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" -"@babel/helper-string-parser@^7.24.8": - version "7.24.8" - resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz#5b3329c9a58803d5df425e5785865881a81ca48d" - integrity sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ== +"@babel/helper-string-parser@^7.19.4", "@babel/helper-string-parser@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz#1aabb72ee72ed35789b4bbcad3ca2862ce614e8c" + integrity sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA== -"@babel/helper-validator-identifier@^7.18.6", "@babel/helper-validator-identifier@^7.19.1": - version "7.19.1" - 
resolved "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz" - integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== +"@babel/helper-validator-identifier@^7.19.1", "@babel/helper-validator-identifier@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz#24b64e2c3ec7cd3b3c547729b8d16871f22cbdc7" + integrity sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ== -"@babel/helper-validator-identifier@^7.24.7": - version "7.24.7" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz#75b889cfaf9e35c2aaf42cf0d72c8e91719251db" - integrity sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w== - -"@babel/helper-validator-option@^7.18.6", "@babel/helper-validator-option@^7.21.0": +"@babel/helper-validator-option@^7.18.6": version "7.21.0" resolved "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.21.0.tgz" integrity sha512-rmL/B8/f0mKS2baE9ZpyTcTavvEuWhTTW8amjzXNvYG4AwBsqTLikfXsEofsJEfKHf+HQVQbFOHy6o+4cnC/fQ== -"@babel/helper-wrap-function@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.18.6.tgz" - integrity sha512-I5/LZfozwMNbwr/b1vhhuYD+J/mU+gfGAj5td7l5Rv9WYmH6i3Om69WGKNmlIpsVW/mF6O5bvTKbvDQZVgjqOw== - dependencies: - "@babel/helper-function-name" "^7.18.6" - "@babel/template" "^7.18.6" - "@babel/traverse" "^7.18.6" - "@babel/types" "^7.18.6" +"@babel/helper-validator-option@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz#86e45bd8a49ab7e03f276577f96179653d41da72" + integrity sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw== -"@babel/helpers@^7.12.5", "@babel/helpers@^7.21.0": - version "7.21.0" - resolved "https://registry.npmjs.org/@babel/helpers/-/helpers-7.21.0.tgz" - integrity sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA== +"@babel/helper-wrap-function@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.25.9.tgz#d99dfd595312e6c894bd7d237470025c85eea9d0" + integrity sha512-ETzz9UTjQSTmw39GboatdymDq4XIQbR8ySgVrylRhPOFpsd+JrKHIuF0de7GCWmem+T4uC5z7EZguod7Wj4A4g== dependencies: - "@babel/template" "^7.20.7" - "@babel/traverse" "^7.21.0" - "@babel/types" "^7.21.0" + "@babel/template" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@babel/types" "^7.25.9" -"@babel/highlight@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/highlight/-/highlight-7.18.6.tgz" - integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== +"@babel/helpers@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.26.0.tgz#30e621f1eba5aa45fe6f4868d2e9154d884119a4" + integrity sha512-tbhNuIxNcVb21pInl3ZSjksLCvgdZy9KwJ8brv993QtIVKJBBkYXz4q4ZbAv31GdnC+R90np23L5FbEBlthAEw== dependencies: - "@babel/helper-validator-identifier" "^7.18.6" - chalk "^2.0.0" - js-tokens "^4.0.0" + "@babel/template" "^7.25.9" + "@babel/types" "^7.26.0" -"@babel/highlight@^7.24.7": - version "7.24.7" - resolved 
"https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.24.7.tgz#a05ab1df134b286558aae0ed41e6c5f731bf409d" - integrity sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw== +"@babel/highlight@^7.18.6": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.25.9.tgz#8141ce68fc73757946f983b343f1231f4691acc6" + integrity sha512-llL88JShoCsth8fF8R4SJnIn+WLvR6ccFxu1H3FlMhDontdcmZWf2HgIZ7AIqV3Xcck1idlohrN4EUBQz6klbw== dependencies: - "@babel/helper-validator-identifier" "^7.24.7" + "@babel/helper-validator-identifier" "^7.25.9" chalk "^2.4.2" js-tokens "^4.0.0" picocolors "^1.0.0" -"@babel/parser@^7.12.7", "@babel/parser@^7.18.8", "@babel/parser@^7.20.7", "@babel/parser@^7.21.4": - version "7.21.4" - resolved "https://registry.npmjs.org/@babel/parser/-/parser-7.21.4.tgz" - integrity sha512-alVJj7k7zIxqBZ7BTRhz0IqJFxW1VJbm6N8JbcYhQ186df9ZBPbZBmWSqAMXwHGsCJdYks7z/voa3ibiS5bCIw== - -"@babel/parser@^7.25.0", "@babel/parser@^7.25.4": - version "7.25.4" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.25.4.tgz#af4f2df7d02440286b7de57b1c21acfb2a6f257a" - integrity sha512-nq+eWrOgdtu3jG5Os4TQP3x3cLA8hR8TvJNjD8vnPa20WGycimcparWnLK4jJhElTK6SDyuJo1weMKO/5LpmLA== +"@babel/parser@^7.25.9", "@babel/parser@^7.26.0", "@babel/parser@^7.26.2": + version "7.26.2" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.26.2.tgz#fd7b6f487cfea09889557ef5d4eeb9ff9a5abd11" + integrity sha512-DWMCZH9WA4Maitz2q21SRKHo9QXZxkDsbNZoVD62gusNtNBBqDg9i7uOhASfTfIGNzW+O+r7+jAlM8dwphcJKQ== dependencies: - "@babel/types" "^7.25.4" + "@babel/types" "^7.26.0" -"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.18.6.tgz" - integrity sha512-Dgxsyg54Fx1d4Nge8UnvTrED63vrwOdPmyvPzlNN/boaliRP54pm3pGzZD1SJUwrBA+Cs/xdG8kXX6Mn/RfISQ== +"@babel/plugin-bugfix-firefox-class-in-computed-class-key@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.25.9.tgz#cc2e53ebf0a0340777fff5ed521943e253b4d8fe" + integrity sha512-ZkRyVkThtxQ/J6nv3JFYv1RYY+JT5BvU0y3k5bWrmuG4woXypRa4PXmm9RhOwodRkYFWqC0C0cqcJ4OqR7kW+g== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/traverse" "^7.25.9" -"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.18.6.tgz" - integrity sha512-Udgu8ZRgrBrttVz6A0EVL0SJ1z+RLbIeqsu632SA1hf0awEppD6TvdznoH+orIF8wtFFAV/Enmw9Y+9oV8TQcw== +"@babel/plugin-bugfix-safari-class-field-initializer-scope@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-class-field-initializer-scope/-/plugin-bugfix-safari-class-field-initializer-scope-7.25.9.tgz#af9e4fb63ccb8abcb92375b2fcfe36b60c774d30" + integrity sha512-MrGRLZxLD/Zjj0gdU15dfs+HH/OXvnw/U4jJD8vpcP2CJQapPEv1IWwjc/qMg7ItBlPwSv1hRBbb7LeuANdcnw== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-skip-transparent-expression-wrappers" "^7.18.6" - "@babel/plugin-proposal-optional-chaining" "^7.18.6" + 
"@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-proposal-async-generator-functions@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.18.6.tgz" - integrity sha512-WAz4R9bvozx4qwf74M+sfqPMKfSqwM0phxPTR6iJIi8robgzXwkEgmeJG1gEKhm6sDqT/U9aV3lfcqybIpev8w== +"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.25.9.tgz#e8dc26fcd616e6c5bf2bd0d5a2c151d4f92a9137" + integrity sha512-2qUwwfAFpJLZqxd02YW9btUCZHl+RFvdDkNfZwaIJrvB8Tesjsk8pEQkTvGwZXLqXUx/2oyY3ySRhm6HOXuCug== dependencies: - "@babel/helper-environment-visitor" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-remap-async-to-generator" "^7.18.6" - "@babel/plugin-syntax-async-generators" "^7.8.4" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-proposal-class-properties@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.18.6.tgz" - integrity sha512-cumfXOF0+nzZrrN8Rf0t7M+tF6sZc7vhQwYQck9q1/5w2OExlD+b4v4RpMJFaV1Z7WcDRgO6FqvxqxGlwo+RHQ== +"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.25.9.tgz#807a667f9158acac6f6164b4beb85ad9ebc9e1d1" + integrity sha512-6xWgLZTJXwilVjlnV7ospI3xi+sl8lN8rXXbBD6vYn3UYDlGsag8wrZkKcSI8G6KgqKP7vNFaDgeDnfAABq61g== dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.25.9" + "@babel/plugin-transform-optional-chaining" "^7.25.9" -"@babel/plugin-proposal-class-static-block@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-class-static-block/-/plugin-proposal-class-static-block-7.18.6.tgz" - integrity sha512-+I3oIiNxrCpup3Gi8n5IGMwj0gOCAjcJUSQEcotNnCCPMEnixawOQ+KeJPlgfjzx+FKQ1QSyZOWe7wmoJp7vhw== +"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.25.9.tgz#de7093f1e7deaf68eadd7cc6b07f2ab82543269e" + integrity sha512-aLnMXYPnzwwqhYSCyXfKkIkYgJ8zv9RK+roo9DkTXz38ynIhd9XCbN08s3MGvqL2MYGVUGdRQLL/JqBIeJhJBg== dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-class-static-block" "^7.14.5" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/traverse" "^7.25.9" -"@babel/plugin-proposal-dynamic-import@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.18.6.tgz" - integrity sha512-1auuwmK+Rz13SJj36R+jqFPMJWyKEDd7lLSdOj4oJK0UTgGueSAtkrCvz9ewmgyU/P941Rv2fQwZJN8s6QruXw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" +"@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2": + version 
"7.21.0-placeholder-for-preset-env.2" + resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz#7844f9289546efa9febac2de4cfe358a050bd703" + integrity sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w== -"@babel/plugin-proposal-export-namespace-from@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.18.6.tgz" - integrity sha512-zr/QcUlUo7GPo6+X1wC98NJADqmy5QTFWWhqeQWiki4XHafJtLl/YMGkmRB2szDD2IYJCCdBTd4ElwhId9T7Xw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - -"@babel/plugin-proposal-json-strings@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.18.6.tgz" - integrity sha512-lr1peyn9kOdbYc0xr0OdHTZ5FMqS6Di+H0Fz2I/JwMzGmzJETNeOFq2pBySw6X/KFL5EWDjlJuMsUGRFb8fQgQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-json-strings" "^7.8.3" - -"@babel/plugin-proposal-logical-assignment-operators@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.18.6.tgz" - integrity sha512-zMo66azZth/0tVd7gmkxOkOjs2rpHyhpcFo565PUP37hSp6hSd9uUKIfTDFMz58BwqgQKhJ9YxtM5XddjXVn+Q== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - -"@babel/plugin-proposal-nullish-coalescing-operator@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.18.6.tgz" - integrity sha512-wQxQzxYeJqHcfppzBDnm1yAY0jSRkUXR2z8RePZYrKwMKgMlE8+Z6LUno+bd6LvbGh8Gltvy74+9pIYkr+XkKA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - -"@babel/plugin-proposal-numeric-separator@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.18.6.tgz" - integrity sha512-ozlZFogPqoLm8WBr5Z8UckIoE4YQ5KESVcNudyXOR8uqIkliTEgJ3RoketfG6pmzLdeZF0H/wjE9/cCEitBl7Q== +"@babel/plugin-syntax-dynamic-import@^7.8.3": + version "7.8.3" + resolved "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz" + integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" + "@babel/helper-plugin-utils" "^7.8.0" -"@babel/plugin-proposal-object-rest-spread@7.12.1": - version "7.12.1" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.12.1.tgz" - integrity sha512-s6SowJIjzlhx8o7lsFx5zmY4At6CTtDvgNQDdPzkBQucle58A6b/TTeEBYtyDgmcXjUTM+vE8YOGHZzzbc/ioA== +"@babel/plugin-syntax-import-assertions@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.26.0.tgz#620412405058efa56e4a564903b79355020f445f" + integrity sha512-QCWT5Hh830hK5EQa7XzuqIkQU9tT/whqbDz7kuaZMHFl1inRRg7JnuAEOQ0Ur0QUl0NufCk1msK2BeY79Aj/eg== dependencies: - 
"@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.0" - "@babel/plugin-transform-parameters" "^7.12.1" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-proposal-object-rest-spread@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.18.6.tgz" - integrity sha512-9yuM6wr4rIsKa1wlUAbZEazkCrgw2sMPEXCr4Rnwetu7cEW1NydkCWytLuYletbf8vFxdJxFhwEZqMpOx2eZyw== +"@babel/plugin-syntax-import-attributes@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.26.0.tgz#3b1412847699eea739b4f2602c74ce36f6b0b0f7" + integrity sha512-e2dttdsJ1ZTpi3B9UYGLw41hifAubg19AtCu/2I/F1QNVclOBr1dYpTdmdyZ84Xiz43BS/tCUkMAZNLv12Pi+A== dependencies: - "@babel/compat-data" "^7.18.6" - "@babel/helper-compilation-targets" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-transform-parameters" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-proposal-optional-catch-binding@^7.18.6": +"@babel/plugin-syntax-jsx@^7.18.6": version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.18.6.tgz" - integrity sha512-Q40HEhs9DJQyaZfUjjn6vE8Cv4GmMHCYuMGIWUnlxH6400VGxOuwWsPt4FxXxJkC/5eOzgn0z21M9gMT4MOhbw== + resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz" + integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== dependencies: "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" -"@babel/plugin-proposal-optional-chaining@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.18.6.tgz" - integrity sha512-PatI6elL5eMzoypFAiYDpYQyMtXTn+iMhuxxQt5mAXD4fEmKorpSI3PHd+i3JXBJN3xyA6MvJv7at23HffFHwA== +"@babel/plugin-syntax-jsx@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.9.tgz#a34313a178ea56f1951599b929c1ceacee719290" + integrity sha512-ld6oezHQMZsZfp6pWtbjaNDF2tiiCYYDqQszHt5VV437lewP9aSi2Of99CK0D0XB21k7FLgnLcmQKyKzynfeAA== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-skip-transparent-expression-wrappers" "^7.18.6" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-proposal-private-methods@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.18.6.tgz" - integrity sha512-nutsvktDItsNn4rpGItSNV2sz1XwS+nfU0Rg8aCx3W3NOKVzdMjJRu0O5OkgDp3ZGICSTbgRpxZoWsxoKRvbeA== +"@babel/plugin-syntax-typescript@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.9.tgz#67dda2b74da43727cf21d46cf9afef23f4365399" + integrity sha512-hjMgRy5hb8uJJjUcdWunWVcoi9bGpJp8p5Ol1229PoN6aytsLwNMgmdftO23wnCLMfVmTwZDWMPNq/D1SY60JQ== dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-proposal-private-property-in-object@^7.18.6": +"@babel/plugin-syntax-unicode-sets-regex@^7.18.6": version "7.18.6" - resolved 
"https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.18.6.tgz" - integrity sha512-9Rysx7FOctvT5ouj5JODjAFAkgGoudQuLPamZb0v1TGLpapdNaftzifU8NTWQm0IRjqoYypdrSmyWgkocDQ8Dw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - -"@babel/plugin-proposal-unicode-property-regex@^7.18.6", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.18.6.tgz" - integrity sha512-2BShG/d5yoZyXZfVePH91urL5wTG6ASZU9M4o03lKK8u8UW1y08OMttBSOADTcJrnPMpvDXRG3G8fyLh4ovs8w== + resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz#d49a3b3e6b52e5be6740022317580234a6a47357" + integrity sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg== dependencies: "@babel/helper-create-regexp-features-plugin" "^7.18.6" "@babel/helper-plugin-utils" "^7.18.6" -"@babel/plugin-syntax-async-generators@^7.8.4": - version "7.8.4" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz" - integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== +"@babel/plugin-transform-arrow-functions@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.25.9.tgz#7821d4410bee5daaadbb4cdd9a6649704e176845" + integrity sha512-6jmooXYIwn9ca5/RylZADJ+EnSxVUS5sjeJ9UPk6RWRzXCmOJCy6dqItPJFpw2cuCangPK4OYr5uhGKcmrm5Qg== dependencies: - "@babel/helper-plugin-utils" "^7.8.0" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-class-properties@^7.12.13": - version "7.12.13" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz" - integrity sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA== +"@babel/plugin-transform-async-generator-functions@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.25.9.tgz#1b18530b077d18a407c494eb3d1d72da505283a2" + integrity sha512-RXV6QAzTBbhDMO9fWwOmwwTuYaiPbggWQ9INdZqAYeSHyG7FzQ+nOZaUUjNwKv9pV3aE4WFqFm1Hnbci5tBCAw== dependencies: - "@babel/helper-plugin-utils" "^7.12.13" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-remap-async-to-generator" "^7.25.9" + "@babel/traverse" "^7.25.9" -"@babel/plugin-syntax-class-static-block@^7.14.5": - version "7.14.5" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz" - integrity sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw== +"@babel/plugin-transform-async-to-generator@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.25.9.tgz#c80008dacae51482793e5a9c08b39a5be7e12d71" + integrity sha512-NT7Ejn7Z/LjUH0Gv5KsBCxh7BH3fbLTV0ptHvpeMvrt3cPThHfJfst9Wrb7S8EvJ7vRTFI7z+VAvFVEQn/m5zQ== dependencies: - "@babel/helper-plugin-utils" "^7.14.5" + 
"@babel/helper-module-imports" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-remap-async-to-generator" "^7.25.9" -"@babel/plugin-syntax-dynamic-import@^7.8.3": - version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz" - integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== +"@babel/plugin-transform-block-scoped-functions@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.25.9.tgz#5700691dbd7abb93de300ca7be94203764fce458" + integrity sha512-toHc9fzab0ZfenFpsyYinOX0J/5dgJVA2fm64xPewu7CoYHWEivIWKxkK2rMi4r3yQqLnVmheMXRdG+k239CgA== dependencies: - "@babel/helper-plugin-utils" "^7.8.0" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-export-namespace-from@^7.8.3": - version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz" - integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== +"@babel/plugin-transform-block-scoping@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.25.9.tgz#c33665e46b06759c93687ca0f84395b80c0473a1" + integrity sha512-1F05O7AYjymAtqbsFETboN1NvBdcnzMerO+zlMyJBEz6WkMdejvGWw9p05iTSjC85RLlBseHHQpYaM4gzJkBGg== dependencies: - "@babel/helper-plugin-utils" "^7.8.3" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-import-assertions@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.18.6.tgz" - integrity sha512-/DU3RXad9+bZwrgWJQKbr39gYbJpLJHezqEzRzi/BHRlJ9zsQb4CK2CA/5apllXNomwA1qHwzvHl+AdEmC5krQ== +"@babel/plugin-transform-class-properties@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.25.9.tgz#a8ce84fedb9ad512549984101fa84080a9f5f51f" + integrity sha512-bbMAII8GRSkcd0h0b4X+36GksxuheLFjP65ul9w6C3KgAamI3JqErNgSrosX6ZPj+Mpim5VvEbawXxJCyEUV3Q== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-create-class-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-json-strings@^7.8.3": - version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz" - integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== +"@babel/plugin-transform-class-static-block@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.26.0.tgz#6c8da219f4eb15cae9834ec4348ff8e9e09664a0" + integrity sha512-6J2APTs7BDDm+UMqP1useWqhcRAXo0WIoVj26N7kPFB6S73Lgvyka4KTZYIxtgYXiN5HTyRObA72N2iu628iTQ== dependencies: - "@babel/helper-plugin-utils" "^7.8.0" + "@babel/helper-create-class-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-jsx@7.12.1": - version "7.12.1" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.1.tgz" - integrity sha512-1yRi7yAtB0ETgxdY9ti/p2TivUxJkTdhu/ZbF9MshVGqOx1TdB3b7xCXs49Fupgg50N45KcAsRP/ZqWjs9SRjg== +"@babel/plugin-transform-classes@^7.25.9": + version 
"7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.25.9.tgz#7152457f7880b593a63ade8a861e6e26a4469f52" + integrity sha512-mD8APIXmseE7oZvZgGABDyM34GUmK45Um2TXiBUt7PnuAxrgoSVf123qUzPxEr/+/BHrRn5NMZCdE2m/1F8DGg== dependencies: - "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-compilation-targets" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-replace-supers" "^7.25.9" + "@babel/traverse" "^7.25.9" + globals "^11.1.0" -"@babel/plugin-syntax-jsx@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.18.6.tgz" - integrity sha512-6mmljtAedFGTWu2p/8WIORGwy+61PLgOMPOdazc7YoJ9ZCWUyFy3A6CpPkRKLKD1ToAesxX8KGEViAiLo9N+7Q== +"@babel/plugin-transform-computed-properties@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.25.9.tgz#db36492c78460e534b8852b1d5befe3c923ef10b" + integrity sha512-HnBegGqXZR12xbcTHlJ9HGxw1OniltT26J5YpfruGqtUHlz/xKf/G2ak9e+t0rVqrjXa9WOhvYPz1ERfMj23AA== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/template" "^7.25.9" -"@babel/plugin-syntax-logical-assignment-operators@^7.10.4": - version "7.10.4" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz" - integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== +"@babel/plugin-transform-destructuring@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.25.9.tgz#966ea2595c498224340883602d3cfd7a0c79cea1" + integrity sha512-WkCGb/3ZxXepmMiX101nnGiU+1CAdut8oHyEOHxkKuS1qKpU2SMXE2uSvfz8PBuLd49V6LEsbtyPhWC7fnkgvQ== dependencies: - "@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": - version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz" - integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== +"@babel/plugin-transform-dotall-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.25.9.tgz#bad7945dd07734ca52fe3ad4e872b40ed09bb09a" + integrity sha512-t7ZQ7g5trIgSRYhI9pIJtRl64KHotutUJsh4Eze5l7olJv+mRSg4/MmbZ0tv1eeqRbdvo/+trvJD/Oc5DmW2cA== dependencies: - "@babel/helper-plugin-utils" "^7.8.0" + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-numeric-separator@^7.10.4": - version "7.10.4" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz" - integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== +"@babel/plugin-transform-duplicate-keys@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.25.9.tgz#8850ddf57dce2aebb4394bb434a7598031059e6d" + integrity sha512-LZxhJ6dvBb/f3x8xwWIuyiAHy56nrRG3PeYTpBkkzkYRRQ6tJLu68lEF5VIqMUZiAV7a8+Tb78nEoMCMcqjXBw== dependencies: - 
"@babel/helper-plugin-utils" "^7.10.4" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-object-rest-spread@7.8.3", "@babel/plugin-syntax-object-rest-spread@^7.8.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3": - version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz" - integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== +"@babel/plugin-transform-duplicate-named-capturing-groups-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.25.9.tgz#6f7259b4de127721a08f1e5165b852fcaa696d31" + integrity sha512-0UfuJS0EsXbRvKnwcLjFtJy/Sxc5J5jhLHnFhy7u4zih97Hz6tJkLU+O+FMMrNZrosUPxDi6sYxJ/EA8jDiAog== dependencies: - "@babel/helper-plugin-utils" "^7.8.0" + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-optional-catch-binding@^7.8.3": - version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz" - integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== +"@babel/plugin-transform-dynamic-import@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.25.9.tgz#23e917de63ed23c6600c5dd06d94669dce79f7b8" + integrity sha512-GCggjexbmSLaFhqsojeugBpeaRIgWNTcgKVq/0qIteFEqY2A+b9QidYadrWlnbWQUrW5fn+mCvf3tr7OeBFTyg== dependencies: - "@babel/helper-plugin-utils" "^7.8.0" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-optional-chaining@^7.8.3": - version "7.8.3" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz" - integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== +"@babel/plugin-transform-exponentiation-operator@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.25.9.tgz#ece47b70d236c1d99c263a1e22b62dc20a4c8b0f" + integrity sha512-KRhdhlVk2nObA5AYa7QMgTMTVJdfHprfpAk4DjZVtllqRg9qarilstTKEhpVjyt+Npi8ThRyiV8176Am3CodPA== dependencies: - "@babel/helper-plugin-utils" "^7.8.0" + "@babel/helper-builder-binary-assignment-operator-visitor" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-private-property-in-object@^7.14.5": - version "7.14.5" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz" - integrity sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg== +"@babel/plugin-transform-export-namespace-from@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.25.9.tgz#90745fe55053394f554e40584cda81f2c8a402a2" + integrity sha512-2NsEz+CxzJIVOPx2o9UsW1rXLqtChtLoVnwYHHiB04wS5sgn7mrV45fWMBX0Kk+ub9uXytVYfNP2HjbVbCB3Ww== dependencies: - "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-syntax-top-level-await@^7.14.5": - version "7.14.5" - resolved 
"https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz" - integrity sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw== +"@babel/plugin-transform-for-of@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.25.9.tgz#4bdc7d42a213397905d89f02350c5267866d5755" + integrity sha512-LqHxduHoaGELJl2uhImHwRQudhCM50pT46rIBNvtT/Oql3nqiS3wOwP+5ten7NpYSXrrVLgtZU3DZmPtWZo16A== dependencies: - "@babel/helper-plugin-utils" "^7.14.5" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.25.9" -"@babel/plugin-syntax-typescript@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.18.6.tgz" - integrity sha512-mAWAuq4rvOepWCBid55JuRNvpTNf2UGVgoz4JV0fXEKolsVZDzsa4NqCef758WZJj/GDu0gVGItjKFiClTAmZA== +"@babel/plugin-transform-function-name@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.25.9.tgz#939d956e68a606661005bfd550c4fc2ef95f7b97" + integrity sha512-8lP+Yxjv14Vc5MuWBpJsoUCd3hD6V9DgBon2FVYL4jJgbnVQ9fTgYmonchzZJOVNgzEgbxp4OwAf6xz6M/14XA== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-compilation-targets" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/traverse" "^7.25.9" -"@babel/plugin-transform-arrow-functions@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.18.6.tgz" - integrity sha512-9S9X9RUefzrsHZmKMbDXxweEH+YlE8JJEuat9FdvW9Qh1cw7W64jELCtWNkPBPX5En45uy28KGvA/AySqUh8CQ== +"@babel/plugin-transform-json-strings@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.25.9.tgz#c86db407cb827cded902a90c707d2781aaa89660" + integrity sha512-xoTMk0WXceiiIvsaquQQUaLLXSW1KJ159KP87VilruQm0LNNGxWzahxSS6T6i4Zg3ezp4vA4zuwiNUR53qmQAw== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-async-to-generator@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.18.6.tgz" - integrity sha512-ARE5wZLKnTgPW7/1ftQmSi1CmkqqHo2DNmtztFhvgtOWSDfq0Cq9/9L+KnZNYSNrydBekhW3rwShduf59RoXag== +"@babel/plugin-transform-literals@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.25.9.tgz#1a1c6b4d4aa59bc4cad5b6b3a223a0abd685c9de" + integrity sha512-9N7+2lFziW8W9pBl2TzaNht3+pgMIRP74zizeCSrtnSKVdUl8mAjjOP2OOVQAfZ881P2cNjDj1uAMEdeD50nuQ== dependencies: - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-remap-async-to-generator" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-block-scoped-functions@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.18.6.tgz" - integrity sha512-ExUcOqpPWnliRcPqves5HJcJOvHvIIWfuS4sroBUenPuMdmW+SMHDakmtS7qOo13sVppmUijqeTv7qqGsvURpQ== +"@babel/plugin-transform-logical-assignment-operators@^7.25.9": + version "7.25.9" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.25.9.tgz#b19441a8c39a2fda0902900b306ea05ae1055db7" + integrity sha512-wI4wRAzGko551Y8eVf6iOY9EouIDTtPb0ByZx+ktDGHwv6bHFimrgJM/2T021txPZ2s4c7bqvHbd+vXG6K948Q== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-block-scoping@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.18.6.tgz" - integrity sha512-pRqwb91C42vs1ahSAWJkxOxU1RHWDn16XAa6ggQ72wjLlWyYeAcLvTtE0aM8ph3KNydy9CQF2nLYcjq1WysgxQ== +"@babel/plugin-transform-member-expression-literals@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.25.9.tgz#63dff19763ea64a31f5e6c20957e6a25e41ed5de" + integrity sha512-PYazBVfofCQkkMzh2P6IdIUaCEWni3iYEerAsRWuVd8+jlM1S9S9cz1dF9hIzyoZ8IA3+OwVYIp9v9e+GbgZhA== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-classes@^7.18.6": - version "7.18.8" - resolved "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.18.8.tgz" - integrity sha512-RySDoXdF6hgHSHuAW4aLGyVQdmvEX/iJtjVre52k0pxRq4hzqze+rAVP++NmNv596brBpYmaiKgTZby7ziBnVg== +"@babel/plugin-transform-modules-amd@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.25.9.tgz#49ba478f2295101544abd794486cd3088dddb6c5" + integrity sha512-g5T11tnI36jVClQlMlt4qKDLlWnG5pP9CSM4GhdRciTNMRgkfpo5cR6b4rGIOYPgRRuFAvwjPQ/Yk+ql4dyhbw== dependencies: - "@babel/helper-annotate-as-pure" "^7.18.6" - "@babel/helper-environment-visitor" "^7.18.6" - "@babel/helper-function-name" "^7.18.6" - "@babel/helper-optimise-call-expression" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-replace-supers" "^7.18.6" - "@babel/helper-split-export-declaration" "^7.18.6" - globals "^11.1.0" + "@babel/helper-module-transforms" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-computed-properties@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.18.6.tgz" - integrity sha512-9repI4BhNrR0KenoR9vm3/cIc1tSBIo+u1WVjKCAynahj25O8zfbiE6JtAtHPGQSs4yZ+bA8mRasRP+qc+2R5A== +"@babel/plugin-transform-modules-commonjs@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.25.9.tgz#d165c8c569a080baf5467bda88df6425fc060686" + integrity sha512-dwh2Ol1jWwL2MgkCzUSOvfmKElqQcuswAZypBSUsScMXvgdT8Ekq5YA6TtqpTVWH+4903NmboMuH1o9i8Rxlyg== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-module-transforms" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-simple-access" "^7.25.9" -"@babel/plugin-transform-destructuring@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.18.6.tgz" - integrity sha512-tgy3u6lRp17ilY8r1kP4i2+HDUwxlVqq3RTc943eAWSzGgpU1qhiKpqZ5CMyHReIYPHdo3Kg8v8edKtDqSVEyQ== +"@babel/plugin-transform-modules-systemjs@^7.25.9": + version "7.25.9" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.25.9.tgz#8bd1b43836269e3d33307151a114bcf3ba6793f8" + integrity sha512-hyss7iIlH/zLHaehT+xwiymtPOpsiwIIRlCAOwBB04ta5Tt+lNItADdlXw3jAWZ96VJ2jlhl/c+PNIQPKNfvcA== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-module-transforms" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-validator-identifier" "^7.25.9" + "@babel/traverse" "^7.25.9" -"@babel/plugin-transform-dotall-regex@^7.18.6", "@babel/plugin-transform-dotall-regex@^7.4.4": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.18.6.tgz" - integrity sha512-6S3jpun1eEbAxq7TdjLotAsl4WpQI9DxfkycRcKrjhQYzU87qpXdknpBg/e+TdcMehqGnLFi7tnFUBR02Vq6wg== +"@babel/plugin-transform-modules-umd@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.25.9.tgz#6710079cdd7c694db36529a1e8411e49fcbf14c9" + integrity sha512-bS9MVObUgE7ww36HEfwe6g9WakQ0KF07mQF74uuXdkoziUPfKyu/nIm663kz//e5O1nPInPFx36z7WJmJ4yNEw== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-module-transforms" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-duplicate-keys@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.18.6.tgz" - integrity sha512-NJU26U/208+sxYszf82nmGYqVF9QN8py2HFTblPT9hbawi8+1C5a9JubODLTGFuT0qlkqVinmkwOD13s0sZktg== +"@babel/plugin-transform-named-capturing-groups-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.25.9.tgz#454990ae6cc22fd2a0fa60b3a2c6f63a38064e6a" + integrity sha512-oqB6WHdKTGl3q/ItQhpLSnWWOpjUJLsOCLVyeFgeTktkBSCiurvPOsyt93gibI9CmuKvTUEtWmG5VhZD+5T/KA== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-exponentiation-operator@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.18.6.tgz" - integrity sha512-wzEtc0+2c88FVR34aQmiz56dxEkxr2g8DQb/KfaFa1JYXOFVsbhvAonFN6PwVWj++fKmku8NP80plJ5Et4wqHw== +"@babel/plugin-transform-new-target@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.25.9.tgz#42e61711294b105c248336dcb04b77054ea8becd" + integrity sha512-U/3p8X1yCSoKyUj2eOBIx3FOn6pElFOKvAAGf8HTtItuPyB+ZeOqfn+mvTtg9ZlOAjsPdK3ayQEjqHjU/yLeVQ== dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-for-of@^7.18.6": - version "7.18.8" - resolved "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.18.8.tgz" - integrity sha512-yEfTRnjuskWYo0k1mHUqrVWaZwrdq8AYbfrpqULOJOaucGSp4mNMVps+YtA8byoevxS/urwU75vyhQIxcCgiBQ== +"@babel/plugin-transform-nullish-coalescing-operator@^7.25.9": + version "7.25.9" + resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.25.9.tgz#bcb1b0d9e948168102d5f7104375ca21c3266949" + integrity sha512-ENfftpLZw5EItALAD4WsY/KUWvhUlZndm5GC7G3evUsVeSJB6p0pBeLQUnRnBCBx7zV0RKQjR9kCuwrsIrjWog== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-function-name@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.18.6.tgz" - integrity sha512-kJha/Gbs5RjzIu0CxZwf5e3aTTSlhZnHMT8zPWnJMjNpLOUgqevg+PN5oMH68nMCXnfiMo4Bhgxqj59KHTlAnA== +"@babel/plugin-transform-numeric-separator@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.25.9.tgz#bfed75866261a8b643468b0ccfd275f2033214a1" + integrity sha512-TlprrJ1GBZ3r6s96Yq8gEQv82s8/5HnCVHtEJScUj90thHQbwe+E5MLhi2bbNHBEJuzrvltXSru+BUxHDoog7Q== dependencies: - "@babel/helper-compilation-targets" "^7.18.6" - "@babel/helper-function-name" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-literals@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.18.6.tgz" - integrity sha512-x3HEw0cJZVDoENXOp20HlypIHfl0zMIhMVZEBVTfmqbObIpsMxMbmU5nOEO8R7LYT+z5RORKPlTI5Hj4OsO9/Q== +"@babel/plugin-transform-object-rest-spread@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.25.9.tgz#0203725025074164808bcf1a2cfa90c652c99f18" + integrity sha512-fSaXafEE9CVHPweLYw4J0emp1t8zYTXyzN3UuG+lylqkvYd7RMrsOQ8TYx5RF231be0vqtFC6jnx3UmpJmKBYg== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-compilation-targets" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-parameters" "^7.25.9" -"@babel/plugin-transform-member-expression-literals@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.18.6.tgz" - integrity sha512-qSF1ihLGO3q+/g48k85tUjD033C29TNTVB2paCwZPVmOsjn9pClvYYrM2VeJpBY2bcNkuny0YUyTNRyRxJ54KA== +"@babel/plugin-transform-object-super@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.25.9.tgz#385d5de135162933beb4a3d227a2b7e52bb4cf03" + integrity sha512-Kj/Gh+Rw2RNLbCK1VAWj2U48yxxqL2x0k10nPtSdRa0O2xnHXalD0s+o1A6a0W43gJ00ANo38jxkQreckOzv5A== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-replace-supers" "^7.25.9" -"@babel/plugin-transform-modules-amd@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.18.6.tgz" - integrity sha512-Pra5aXsmTsOnjM3IajS8rTaLCy++nGM4v3YR4esk5PCsyg9z8NA5oQLwxzMUtDBd8F+UmVza3VxoAaWCbzH1rg== +"@babel/plugin-transform-optional-catch-binding@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.25.9.tgz#10e70d96d52bb1f10c5caaac59ac545ea2ba7ff3" + integrity sha512-qM/6m6hQZzDcZF3onzIhZeDHDO43bkNNlOX0i8n3lR6zLbu0GN2d8qfM/IERJZYauhAHSLHy39NF0Ctdvcid7g== dependencies: - 
"@babel/helper-module-transforms" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - babel-plugin-dynamic-import-node "^2.3.3" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-modules-commonjs@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.18.6.tgz" - integrity sha512-Qfv2ZOWikpvmedXQJDSbxNqy7Xr/j2Y8/KfijM0iJyKkBTmWuvCA1yeH1yDM7NJhBW/2aXxeucLj6i80/LAJ/Q== +"@babel/plugin-transform-optional-chaining@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.25.9.tgz#e142eb899d26ef715435f201ab6e139541eee7dd" + integrity sha512-6AvV0FsLULbpnXeBjrY4dmWF8F7gf8QnvTEoO/wX/5xm/xE1Xo8oPuD3MPS+KS9f9XBEAWN7X1aWr4z9HdOr7A== dependencies: - "@babel/helper-module-transforms" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-simple-access" "^7.18.6" - babel-plugin-dynamic-import-node "^2.3.3" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.25.9" -"@babel/plugin-transform-modules-systemjs@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.18.6.tgz" - integrity sha512-UbPYpXxLjTw6w6yXX2BYNxF3p6QY225wcTkfQCy3OMnSlS/C3xGtwUjEzGkldb/sy6PWLiCQ3NbYfjWUTI3t4g== +"@babel/plugin-transform-parameters@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.25.9.tgz#b856842205b3e77e18b7a7a1b94958069c7ba257" + integrity sha512-wzz6MKwpnshBAiRmn4jR8LYz/g8Ksg0o80XmwZDlordjwEk9SxBzTWC7F5ef1jhbrbOW2DJ5J6ayRukrJmnr0g== dependencies: - "@babel/helper-hoist-variables" "^7.18.6" - "@babel/helper-module-transforms" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-validator-identifier" "^7.18.6" - babel-plugin-dynamic-import-node "^2.3.3" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-modules-umd@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.18.6.tgz" - integrity sha512-dcegErExVeXcRqNtkRU/z8WlBLnvD4MRnHgNs3MytRO1Mn1sHRyhbcpYbVMGclAqOjdW+9cfkdZno9dFdfKLfQ== +"@babel/plugin-transform-private-methods@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.25.9.tgz#847f4139263577526455d7d3223cd8bda51e3b57" + integrity sha512-D/JUozNpQLAPUVusvqMxyvjzllRaF8/nSrP1s2YGQT/W4LHK4xxsMcHjhOGTS01mp9Hda8nswb+FblLdJornQw== dependencies: - "@babel/helper-module-transforms" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-create-class-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-named-capturing-groups-regex@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.18.6.tgz" - integrity sha512-UmEOGF8XgaIqD74bC8g7iV3RYj8lMf0Bw7NJzvnS9qQhM4mg+1WHKotUIdjxgD2RGrgFLZZPCFPFj3P/kVDYhg== +"@babel/plugin-transform-private-property-in-object@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.25.9.tgz#9c8b73e64e6cc3cbb2743633885a7dd2c385fe33" + integrity 
sha512-Evf3kcMqzXA3xfYJmZ9Pg1OvKdtqsDMSWBDzZOPLvHiTt36E75jLDQo5w1gtRU95Q4E5PDttrTf25Fw8d/uWLw== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-create-class-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-new-target@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.18.6.tgz" - integrity sha512-DjwFA/9Iu3Z+vrAn+8pBUGcjhxKguSMlsFqeCKbhb9BAV756v0krzVK04CRDi/4aqmk8BsHb4a/gFcaA5joXRw== +"@babel/plugin-transform-property-literals@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.25.9.tgz#d72d588bd88b0dec8b62e36f6fda91cedfe28e3f" + integrity sha512-IvIUeV5KrS/VPavfSM/Iu+RE6llrHrYIKY1yfCzyO/lMXHQ+p7uGhonmGVisv6tSBSVgWzMBohTcvkC9vQcQFA== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-object-super@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.18.6.tgz" - integrity sha512-uvGz6zk+pZoS1aTZrOvrbj6Pp/kK2mp45t2B+bTDre2UgsZZ8EZLSJtUg7m/no0zOJUWgFONpB7Zv9W2tSaFlA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-replace-supers" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-parameters@^7.12.1", "@babel/plugin-transform-parameters@^7.18.6": - version "7.18.8" - resolved "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.18.8.tgz" - integrity sha512-ivfbE3X2Ss+Fj8nnXvKJS6sjRG4gzwPMsP+taZC+ZzEGjAYlvENixmt1sZ5Ca6tWls+BlKSGKPJ6OOXvXCbkFg== +"@babel/plugin-transform-react-constant-elements@^7.21.3": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.25.9.tgz#08a1de35a301929b60fdf2788a54b46cd8ecd0ef" + integrity sha512-Ncw2JFsJVuvfRsa2lSHiC55kETQVLSnsYGQ1JDDwkUeWGTL/8Tom8aLTnlqgoeuopWrbbGndrc9AlLYrIosrow== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-property-literals@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.18.6.tgz" - integrity sha512-cYcs6qlgafTud3PAzrrRNbQtfpQ8+y/+M5tKmksS9+M1ckbH6kzY8MrexEM9mcA6JDsukE19iIRvAyYl463sMg== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-react-constant-elements@^7.14.5": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-react-constant-elements/-/plugin-transform-react-constant-elements-7.18.6.tgz" - integrity sha512-4g5H1bonF1dqgMe+wQ2fvDlRZ/mN/KwArk13teDv+xxn+pUDEiiDluQd6D2B30MJcL1u3qr0WZpfq0mw9/zSqA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" "@babel/plugin-transform-react-display-name@^7.18.6": version "7.18.6" @@ -979,6 +983,13 @@ dependencies: "@babel/helper-plugin-utils" "^7.18.6" +"@babel/plugin-transform-react-display-name@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-display-name/-/plugin-transform-react-display-name-7.25.9.tgz#4b79746b59efa1f38c8695065a92a9f5afb24f7d" + integrity sha512-KJfMlYIUxQB1CJfO3e0+h0ZHWOTLCPP115Awhaz8U0Zpq36Gl/cXlpoyMRnUWlhNUBAzldnCiAZNvCDj7CrKxQ== 
+ dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-transform-react-jsx-development@^7.18.6": version "7.18.6" resolved "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.18.6.tgz" @@ -986,6 +997,13 @@ dependencies: "@babel/plugin-transform-react-jsx" "^7.18.6" +"@babel/plugin-transform-react-jsx-development@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx-development/-/plugin-transform-react-jsx-development-7.25.9.tgz#8fd220a77dd139c07e25225a903b8be8c829e0d7" + integrity sha512-9mj6rm7XVYs4mdLIpbZnHOYdpW42uoiBCTVowg7sP1thUOiANgMb4UtpRivR0pp5iL+ocvUv7X4mZgFRpJEzGw== + dependencies: + "@babel/plugin-transform-react-jsx" "^7.25.9" + "@babel/plugin-transform-react-jsx@^7.18.6": version "7.18.6" resolved "https://registry.npmjs.org/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.18.6.tgz" @@ -997,6 +1015,17 @@ "@babel/plugin-syntax-jsx" "^7.18.6" "@babel/types" "^7.18.6" +"@babel/plugin-transform-react-jsx@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-jsx/-/plugin-transform-react-jsx-7.25.9.tgz#06367940d8325b36edff5e2b9cbe782947ca4166" + integrity sha512-s5XwpQYCqGerXl+Pu6VDL3x0j2d82eiV77UJ8a2mDHAW7j9SWRqQ2y1fNo1Z74CdcYipl5Z41zvjj4Nfzq36rw== + dependencies: + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-module-imports" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/plugin-syntax-jsx" "^7.25.9" + "@babel/types" "^7.25.9" + "@babel/plugin-transform-react-pure-annotations@^7.18.6": version "7.18.6" resolved "https://registry.npmjs.org/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.18.6.tgz" @@ -1005,186 +1034,212 @@ "@babel/helper-annotate-as-pure" "^7.18.6" "@babel/helper-plugin-utils" "^7.18.6" -"@babel/plugin-transform-regenerator@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.18.6.tgz" - integrity sha512-poqRI2+qiSdeldcz4wTSTXBRryoq3Gc70ye7m7UD5Ww0nE29IXqMl6r7Nd15WBgRd74vloEMlShtH6CKxVzfmQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - regenerator-transform "^0.15.0" - -"@babel/plugin-transform-reserved-words@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.18.6.tgz" - integrity sha512-oX/4MyMoypzHjFrT1CdivfKZ+XvIPMFXwwxHp/r0Ddy2Vuomt4HDFGmft1TAY2yiTKiNSsh3kjBAzcM8kSdsjA== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-runtime@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.18.6.tgz" - integrity sha512-8uRHk9ZmRSnWqUgyae249EJZ94b0yAGLBIqzZzl+0iEdbno55Pmlt/32JZsHwXD9k/uZj18Aqqk35wBX4CBTXA== - dependencies: - "@babel/helper-module-imports" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - babel-plugin-polyfill-corejs2 "^0.3.1" - babel-plugin-polyfill-corejs3 "^0.5.2" - babel-plugin-polyfill-regenerator "^0.3.1" - semver "^6.3.0" - -"@babel/plugin-transform-shorthand-properties@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.18.6.tgz" - integrity sha512-eCLXXJqv8okzg86ywZJbRn19YJHU4XUa55oz2wbHhaQVn/MM+XhukiT7SYqp/7o00dg52Rj51Ny+Ecw4oyoygw== - dependencies: - "@babel/helper-plugin-utils" 
"^7.18.6" - -"@babel/plugin-transform-spread@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.18.6.tgz" - integrity sha512-ayT53rT/ENF8WWexIRg9AiV9h0aIteyWn5ptfZTZQrjk/+f3WdrJGCY4c9wcgl2+MKkKPhzbYp97FTsquZpDCw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-skip-transparent-expression-wrappers" "^7.18.6" - -"@babel/plugin-transform-sticky-regex@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.18.6.tgz" - integrity sha512-kfiDrDQ+PBsQDO85yj1icueWMfGfJFKN1KCkndygtu/C9+XUfydLC8Iv5UYJqRwy4zk8EcplRxEOeLyjq1gm6Q== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-template-literals@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.18.6.tgz" - integrity sha512-UuqlRrQmT2SWRvahW46cGSany0uTlcj8NYOS5sRGYi8FxPYPoLd5DDmMd32ZXEj2Jq+06uGVQKHxa/hJx2EzKw== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-typeof-symbol@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.18.6.tgz" - integrity sha512-7m71iS/QhsPk85xSjFPovHPcH3H9qeyzsujhTc+vcdnsXavoWYJ74zx0lP5RhpC5+iDnVLO+PPMHzC11qels1g== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - -"@babel/plugin-transform-typescript@^7.18.6": - version "7.18.8" - resolved "https://registry.npmjs.org/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.18.8.tgz" - integrity sha512-p2xM8HI83UObjsZGofMV/EdYjamsDm6MoN3hXPYIT0+gxIoopE+B7rPYKAxfrz9K9PK7JafTTjqYC6qipLExYA== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/plugin-syntax-typescript" "^7.18.6" - -"@babel/plugin-transform-unicode-escapes@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.18.6.tgz" - integrity sha512-XNRwQUXYMP7VLuy54cr/KS/WeL3AZeORhrmeZ7iewgu+X2eBqmpaLI/hzqr9ZxCeUoq0ASK4GUzSM0BDhZkLFw== +"@babel/plugin-transform-react-pure-annotations@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-react-pure-annotations/-/plugin-transform-react-pure-annotations-7.25.9.tgz#ea1c11b2f9dbb8e2d97025f43a3b5bc47e18ae62" + integrity sha512-KQ/Takk3T8Qzj5TppkS1be588lkbTp5uj7w6a0LeQaTMSckU/wK0oJ/pih+T690tkgI5jfmg2TqDJvd41Sj1Cg== dependencies: - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/plugin-transform-unicode-regex@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.18.6.tgz" - integrity sha512-gE7A6Lt7YLnNOL3Pb9BNeZvi+d8l7tcRrG4+pwJjK9hD2xX4mEvjlQW60G9EEmfXVYRPv9VRQcyegIVHCql/AA== +"@babel/plugin-transform-regenerator@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.25.9.tgz#03a8a4670d6cebae95305ac6defac81ece77740b" + integrity sha512-vwDcDNsgMPDGP0nMqzahDWE5/MLcX8sv96+wfX7as7LoF/kr97Bo/7fI00lXY4wUXYfVmwIIyG80fGZ1uvt2qg== dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" + "@babel/helper-plugin-utils" "^7.25.9" + 
regenerator-transform "^0.15.2" -"@babel/preset-env@^7.15.6", "@babel/preset-env@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.18.6.tgz" - integrity sha512-WrthhuIIYKrEFAwttYzgRNQ5hULGmwTj+D6l7Zdfsv5M7IWV/OZbUfbeL++Qrzx1nVJwWROIFhCHRYQV4xbPNw== +"@babel/plugin-transform-regexp-modifiers@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regexp-modifiers/-/plugin-transform-regexp-modifiers-7.26.0.tgz#2f5837a5b5cd3842a919d8147e9903cc7455b850" + integrity sha512-vN6saax7lrA2yA/Pak3sCxuD6F5InBjn9IcrIKQPjpsLvuHYLVroTxjdlVRHjjBWxKOqIwpTXDkOssYT4BFdRw== dependencies: - "@babel/compat-data" "^7.18.6" - "@babel/helper-compilation-targets" "^7.18.6" - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-validator-option" "^7.18.6" - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.18.6" - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.18.6" - "@babel/plugin-proposal-async-generator-functions" "^7.18.6" - "@babel/plugin-proposal-class-properties" "^7.18.6" - "@babel/plugin-proposal-class-static-block" "^7.18.6" - "@babel/plugin-proposal-dynamic-import" "^7.18.6" - "@babel/plugin-proposal-export-namespace-from" "^7.18.6" - "@babel/plugin-proposal-json-strings" "^7.18.6" - "@babel/plugin-proposal-logical-assignment-operators" "^7.18.6" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.18.6" - "@babel/plugin-proposal-numeric-separator" "^7.18.6" - "@babel/plugin-proposal-object-rest-spread" "^7.18.6" - "@babel/plugin-proposal-optional-catch-binding" "^7.18.6" - "@babel/plugin-proposal-optional-chaining" "^7.18.6" - "@babel/plugin-proposal-private-methods" "^7.18.6" - "@babel/plugin-proposal-private-property-in-object" "^7.18.6" - "@babel/plugin-proposal-unicode-property-regex" "^7.18.6" - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-class-properties" "^7.12.13" - "@babel/plugin-syntax-class-static-block" "^7.14.5" - "@babel/plugin-syntax-dynamic-import" "^7.8.3" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-syntax-import-assertions" "^7.18.6" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - "@babel/plugin-syntax-private-property-in-object" "^7.14.5" - "@babel/plugin-syntax-top-level-await" "^7.14.5" - "@babel/plugin-transform-arrow-functions" "^7.18.6" - "@babel/plugin-transform-async-to-generator" "^7.18.6" - "@babel/plugin-transform-block-scoped-functions" "^7.18.6" - "@babel/plugin-transform-block-scoping" "^7.18.6" - "@babel/plugin-transform-classes" "^7.18.6" - "@babel/plugin-transform-computed-properties" "^7.18.6" - "@babel/plugin-transform-destructuring" "^7.18.6" - "@babel/plugin-transform-dotall-regex" "^7.18.6" - "@babel/plugin-transform-duplicate-keys" "^7.18.6" - "@babel/plugin-transform-exponentiation-operator" "^7.18.6" - "@babel/plugin-transform-for-of" "^7.18.6" - "@babel/plugin-transform-function-name" "^7.18.6" - "@babel/plugin-transform-literals" "^7.18.6" - "@babel/plugin-transform-member-expression-literals" "^7.18.6" - "@babel/plugin-transform-modules-amd" "^7.18.6" - "@babel/plugin-transform-modules-commonjs" "^7.18.6" - 
"@babel/plugin-transform-modules-systemjs" "^7.18.6" - "@babel/plugin-transform-modules-umd" "^7.18.6" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.18.6" - "@babel/plugin-transform-new-target" "^7.18.6" - "@babel/plugin-transform-object-super" "^7.18.6" - "@babel/plugin-transform-parameters" "^7.18.6" - "@babel/plugin-transform-property-literals" "^7.18.6" - "@babel/plugin-transform-regenerator" "^7.18.6" - "@babel/plugin-transform-reserved-words" "^7.18.6" - "@babel/plugin-transform-shorthand-properties" "^7.18.6" - "@babel/plugin-transform-spread" "^7.18.6" - "@babel/plugin-transform-sticky-regex" "^7.18.6" - "@babel/plugin-transform-template-literals" "^7.18.6" - "@babel/plugin-transform-typeof-symbol" "^7.18.6" - "@babel/plugin-transform-unicode-escapes" "^7.18.6" - "@babel/plugin-transform-unicode-regex" "^7.18.6" - "@babel/preset-modules" "^0.1.5" - "@babel/types" "^7.18.6" - babel-plugin-polyfill-corejs2 "^0.3.1" - babel-plugin-polyfill-corejs3 "^0.5.2" - babel-plugin-polyfill-regenerator "^0.3.1" - core-js-compat "^3.22.1" - semver "^6.3.0" + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" -"@babel/preset-modules@^0.1.5": - version "0.1.5" - resolved "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.5.tgz" - integrity sha512-A57th6YRG7oR3cq/yt/Y84MvGgE0eJG2F1JLhKuyG+jFxEgrd/HAMJatiFtmOiZurz+0DkrvbheCLaV5f2JfjA== +"@babel/plugin-transform-reserved-words@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.25.9.tgz#0398aed2f1f10ba3f78a93db219b27ef417fb9ce" + integrity sha512-7DL7DKYjn5Su++4RXu8puKZm2XBPHyjWLUidaPEkCUBbE7IPcsrkRHggAOOKydH1dASWdcUBxrkOGNxUv5P3Jg== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + +"@babel/plugin-transform-runtime@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.25.9.tgz#62723ea3f5b31ffbe676da9d6dae17138ae580ea" + integrity sha512-nZp7GlEl+yULJrClz0SwHPqir3lc0zsPrDHQUcxGspSL7AKrexNSEfTbfqnDNJUO13bgKyfuOLMF8Xqtu8j3YQ== + dependencies: + "@babel/helper-module-imports" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + babel-plugin-polyfill-corejs2 "^0.4.10" + babel-plugin-polyfill-corejs3 "^0.10.6" + babel-plugin-polyfill-regenerator "^0.6.1" + semver "^6.3.1" + +"@babel/plugin-transform-shorthand-properties@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.25.9.tgz#bb785e6091f99f826a95f9894fc16fde61c163f2" + integrity sha512-MUv6t0FhO5qHnS/W8XCbHmiRWOphNufpE1IVxhK5kuN3Td9FT1x4rx4K42s3RYdMXCXpfWkGSbCSd0Z64xA7Ng== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + +"@babel/plugin-transform-spread@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.25.9.tgz#24a35153931b4ba3d13cec4a7748c21ab5514ef9" + integrity sha512-oNknIB0TbURU5pqJFVbOOFspVlrpVwo2H1+HUIsVDvp5VauGGDP1ZEvO8Nn5xyMEs3dakajOxlmkNW7kNgSm6A== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.25.9" + +"@babel/plugin-transform-sticky-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.25.9.tgz#c7f02b944e986a417817b20ba2c504dfc1453d32" + integrity 
sha512-WqBUSgeVwucYDP9U/xNRQam7xV8W5Zf+6Eo7T2SRVUFlhRiMNFdFz58u0KZmCVVqs2i7SHgpRnAhzRNmKfi2uA== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + +"@babel/plugin-transform-template-literals@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.25.9.tgz#6dbd4a24e8fad024df76d1fac6a03cf413f60fe1" + integrity sha512-o97AE4syN71M/lxrCtQByzphAdlYluKPDBzDVzMmfCobUjjhAryZV0AIpRPrxN0eAkxXO6ZLEScmt+PNhj2OTw== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + +"@babel/plugin-transform-typeof-symbol@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.25.9.tgz#224ba48a92869ddbf81f9b4a5f1204bbf5a2bc4b" + integrity sha512-v61XqUMiueJROUv66BVIOi0Fv/CUuZuZMl5NkRoCVxLAnMexZ0A3kMe7vvZ0nulxMuMp0Mk6S5hNh48yki08ZA== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + +"@babel/plugin-transform-typescript@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typescript/-/plugin-transform-typescript-7.25.9.tgz#69267905c2b33c2ac6d8fe765e9dc2ddc9df3849" + integrity sha512-7PbZQZP50tzv2KGGnhh82GSyMB01yKY9scIjf1a+GfZCtInOWqUH5+1EBU4t9fyR5Oykkkc9vFTs4OHrhHXljQ== + dependencies: + "@babel/helper-annotate-as-pure" "^7.25.9" + "@babel/helper-create-class-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-skip-transparent-expression-wrappers" "^7.25.9" + "@babel/plugin-syntax-typescript" "^7.25.9" + +"@babel/plugin-transform-unicode-escapes@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.25.9.tgz#a75ef3947ce15363fccaa38e2dd9bc70b2788b82" + integrity sha512-s5EDrE6bW97LtxOcGj1Khcx5AaXwiMmi4toFWRDP9/y0Woo6pXC+iyPu/KuhKtfSrNFd7jJB+/fkOtZy6aIC6Q== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + +"@babel/plugin-transform-unicode-property-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.25.9.tgz#a901e96f2c1d071b0d1bb5dc0d3c880ce8f53dd3" + integrity sha512-Jt2d8Ga+QwRluxRQ307Vlxa6dMrYEMZCgGxoPR8V52rxPyldHu3hdlHspxaqYmE7oID5+kB+UKUB/eWS+DkkWg== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + +"@babel/plugin-transform-unicode-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.25.9.tgz#5eae747fe39eacf13a8bd006a4fb0b5d1fa5e9b1" + integrity sha512-yoxstj7Rg9dlNn9UQxzk4fcNivwv4nUYz7fYXBaKxvw/lnmPuOm/ikoELygbYq68Bls3D/D+NBPHiLwZdZZ4HA== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + +"@babel/plugin-transform-unicode-sets-regex@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.25.9.tgz#65114c17b4ffc20fa5b163c63c70c0d25621fabe" + integrity sha512-8BYqO3GeVNHtx69fdPshN3fnzUNLrWdHhk/icSwigksJGczKSizZ+Z6SBCxTs723Fr5VSNorTIK7a+R2tISvwQ== + dependencies: + "@babel/helper-create-regexp-features-plugin" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + +"@babel/preset-env@^7.20.2", "@babel/preset-env@^7.25.9": + version "7.26.0" + resolved 
"https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.26.0.tgz#30e5c6bc1bcc54865bff0c5a30f6d4ccdc7fa8b1" + integrity sha512-H84Fxq0CQJNdPFT2DrfnylZ3cf5K43rGfWK4LJGPpjKHiZlk0/RzwEus3PDDZZg+/Er7lCA03MVacueUuXdzfw== + dependencies: + "@babel/compat-data" "^7.26.0" + "@babel/helper-compilation-targets" "^7.25.9" + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-validator-option" "^7.25.9" + "@babel/plugin-bugfix-firefox-class-in-computed-class-key" "^7.25.9" + "@babel/plugin-bugfix-safari-class-field-initializer-scope" "^7.25.9" + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression" "^7.25.9" + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining" "^7.25.9" + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly" "^7.25.9" + "@babel/plugin-proposal-private-property-in-object" "7.21.0-placeholder-for-preset-env.2" + "@babel/plugin-syntax-import-assertions" "^7.26.0" + "@babel/plugin-syntax-import-attributes" "^7.26.0" + "@babel/plugin-syntax-unicode-sets-regex" "^7.18.6" + "@babel/plugin-transform-arrow-functions" "^7.25.9" + "@babel/plugin-transform-async-generator-functions" "^7.25.9" + "@babel/plugin-transform-async-to-generator" "^7.25.9" + "@babel/plugin-transform-block-scoped-functions" "^7.25.9" + "@babel/plugin-transform-block-scoping" "^7.25.9" + "@babel/plugin-transform-class-properties" "^7.25.9" + "@babel/plugin-transform-class-static-block" "^7.26.0" + "@babel/plugin-transform-classes" "^7.25.9" + "@babel/plugin-transform-computed-properties" "^7.25.9" + "@babel/plugin-transform-destructuring" "^7.25.9" + "@babel/plugin-transform-dotall-regex" "^7.25.9" + "@babel/plugin-transform-duplicate-keys" "^7.25.9" + "@babel/plugin-transform-duplicate-named-capturing-groups-regex" "^7.25.9" + "@babel/plugin-transform-dynamic-import" "^7.25.9" + "@babel/plugin-transform-exponentiation-operator" "^7.25.9" + "@babel/plugin-transform-export-namespace-from" "^7.25.9" + "@babel/plugin-transform-for-of" "^7.25.9" + "@babel/plugin-transform-function-name" "^7.25.9" + "@babel/plugin-transform-json-strings" "^7.25.9" + "@babel/plugin-transform-literals" "^7.25.9" + "@babel/plugin-transform-logical-assignment-operators" "^7.25.9" + "@babel/plugin-transform-member-expression-literals" "^7.25.9" + "@babel/plugin-transform-modules-amd" "^7.25.9" + "@babel/plugin-transform-modules-commonjs" "^7.25.9" + "@babel/plugin-transform-modules-systemjs" "^7.25.9" + "@babel/plugin-transform-modules-umd" "^7.25.9" + "@babel/plugin-transform-named-capturing-groups-regex" "^7.25.9" + "@babel/plugin-transform-new-target" "^7.25.9" + "@babel/plugin-transform-nullish-coalescing-operator" "^7.25.9" + "@babel/plugin-transform-numeric-separator" "^7.25.9" + "@babel/plugin-transform-object-rest-spread" "^7.25.9" + "@babel/plugin-transform-object-super" "^7.25.9" + "@babel/plugin-transform-optional-catch-binding" "^7.25.9" + "@babel/plugin-transform-optional-chaining" "^7.25.9" + "@babel/plugin-transform-parameters" "^7.25.9" + "@babel/plugin-transform-private-methods" "^7.25.9" + "@babel/plugin-transform-private-property-in-object" "^7.25.9" + "@babel/plugin-transform-property-literals" "^7.25.9" + "@babel/plugin-transform-regenerator" "^7.25.9" + "@babel/plugin-transform-regexp-modifiers" "^7.26.0" + "@babel/plugin-transform-reserved-words" "^7.25.9" + "@babel/plugin-transform-shorthand-properties" "^7.25.9" + "@babel/plugin-transform-spread" "^7.25.9" + "@babel/plugin-transform-sticky-regex" "^7.25.9" + "@babel/plugin-transform-template-literals" "^7.25.9" + 
"@babel/plugin-transform-typeof-symbol" "^7.25.9" + "@babel/plugin-transform-unicode-escapes" "^7.25.9" + "@babel/plugin-transform-unicode-property-regex" "^7.25.9" + "@babel/plugin-transform-unicode-regex" "^7.25.9" + "@babel/plugin-transform-unicode-sets-regex" "^7.25.9" + "@babel/preset-modules" "0.1.6-no-external-plugins" + babel-plugin-polyfill-corejs2 "^0.4.10" + babel-plugin-polyfill-corejs3 "^0.10.6" + babel-plugin-polyfill-regenerator "^0.6.1" + core-js-compat "^3.38.1" + semver "^6.3.1" + +"@babel/preset-modules@0.1.6-no-external-plugins": + version "0.1.6-no-external-plugins" + resolved "https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz#ccb88a2c49c817236861fee7826080573b8a923a" + integrity sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA== dependencies: "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" - "@babel/plugin-transform-dotall-regex" "^7.4.4" "@babel/types" "^7.4.4" esutils "^2.0.2" -"@babel/preset-react@^7.14.5", "@babel/preset-react@^7.18.6": +"@babel/preset-react@^7.18.6": version "7.18.6" resolved "https://registry.npmjs.org/@babel/preset-react/-/preset-react-7.18.6.tgz" integrity sha512-zXr6atUmyYdiWRVLOZahakYmOBHtWc2WGCkP8PYTgZi0iJXDY2CN180TdrIW4OGOAdLc7TifzDIvtx6izaRIzg== @@ -1196,62 +1251,82 @@ "@babel/plugin-transform-react-jsx-development" "^7.18.6" "@babel/plugin-transform-react-pure-annotations" "^7.18.6" -"@babel/preset-typescript@^7.15.0", "@babel/preset-typescript@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/preset-typescript/-/preset-typescript-7.18.6.tgz" - integrity sha512-s9ik86kXBAnD760aybBucdpnLsAt0jK1xqJn2juOn9lkOvSHV60os5hxoVJsPzMQxvnUJFAlkont2DvvaYEBtQ== - dependencies: - "@babel/helper-plugin-utils" "^7.18.6" - "@babel/helper-validator-option" "^7.18.6" - "@babel/plugin-transform-typescript" "^7.18.6" - -"@babel/runtime-corejs3@^7.18.6": - version "7.18.6" - resolved "https://registry.npmjs.org/@babel/runtime-corejs3/-/runtime-corejs3-7.18.6.tgz" - integrity sha512-cOu5wH2JFBgMjje+a+fz2JNIWU4GzYpl05oSob3UDvBEh6EuIn+TXFHMmBbhSb+k/4HMzgKCQfEEDArAWNF9Cw== - dependencies: - core-js-pure "^3.20.2" - regenerator-runtime "^0.13.4" +"@babel/preset-react@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/preset-react/-/preset-react-7.25.9.tgz#5f473035dc2094bcfdbc7392d0766bd42dce173e" + integrity sha512-D3to0uSPiWE7rBrdIICCd0tJSIGpLaaGptna2+w7Pft5xMqLpA1sz99DK5TZ1TjGbdQ/VI1eCSZ06dv3lT4JOw== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-validator-option" "^7.25.9" + "@babel/plugin-transform-react-display-name" "^7.25.9" + "@babel/plugin-transform-react-jsx" "^7.25.9" + "@babel/plugin-transform-react-jsx-development" "^7.25.9" + "@babel/plugin-transform-react-pure-annotations" "^7.25.9" + +"@babel/preset-typescript@^7.21.0", "@babel/preset-typescript@^7.25.9": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/preset-typescript/-/preset-typescript-7.26.0.tgz#4a570f1b8d104a242d923957ffa1eaff142a106d" + integrity sha512-NMk1IGZ5I/oHhoXEElcm+xUnL/szL6xflkFZmoEU9xj1qSJXpiS7rsspYo92B4DRCDvZn2erT5LdsCeXAKNCkg== + dependencies: + "@babel/helper-plugin-utils" "^7.25.9" + "@babel/helper-validator-option" "^7.25.9" + "@babel/plugin-syntax-jsx" "^7.25.9" + "@babel/plugin-transform-modules-commonjs" "^7.25.9" + "@babel/plugin-transform-typescript" "^7.25.9" + +"@babel/runtime-corejs3@^7.25.9": + version "7.26.0" + resolved 
"https://registry.yarnpkg.com/@babel/runtime-corejs3/-/runtime-corejs3-7.26.0.tgz#5af6bed16073eb4a0191233d61e158a5c768c430" + integrity sha512-YXHu5lN8kJCb1LOb9PgV6pvak43X2h4HvRApcN5SdWeaItQOzfn1hgP6jasD6KWQyJDBxrVmA9o9OivlnNJK/w== + dependencies: + core-js-pure "^3.30.2" + regenerator-runtime "^0.14.0" -"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.12.1", "@babel/runtime@^7.12.13", "@babel/runtime@^7.12.5", "@babel/runtime@^7.18.3", "@babel/runtime@^7.18.6", "@babel/runtime@^7.23.4", "@babel/runtime@^7.23.5", "@babel/runtime@^7.5.5", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7": +"@babel/runtime@^7.1.2", "@babel/runtime@^7.10.3", "@babel/runtime@^7.12.13", "@babel/runtime@^7.23.4", "@babel/runtime@^7.5.5", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7": version "7.23.5" resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.23.5.tgz" integrity sha512-NdUTHcPe4C99WxPub+K9l9tK5/lV4UXIoaHSYgzco9BCyjKAAwzdBI+wWtYqHt7LJdbo74ZjRPJgzVweq1sz0w== dependencies: regenerator-runtime "^0.14.0" -"@babel/template@^7.12.7", "@babel/template@^7.18.6", "@babel/template@^7.20.7": - version "7.20.7" - resolved "https://registry.npmjs.org/@babel/template/-/template-7.20.7.tgz" - integrity sha512-8SegXApWe6VoNw0r9JHpSteLKTpTiLZ4rMlGIm9JQ18KiCtyQiAMEazujAHrUS5flrcqYZa75ukev3P6QmUwUw== - dependencies: - "@babel/code-frame" "^7.18.6" - "@babel/parser" "^7.20.7" - "@babel/types" "^7.20.7" - -"@babel/template@^7.25.0": - version "7.25.0" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.25.0.tgz#e733dc3134b4fede528c15bc95e89cb98c52592a" - integrity sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q== - dependencies: - "@babel/code-frame" "^7.24.7" - "@babel/parser" "^7.25.0" - "@babel/types" "^7.25.0" - -"@babel/traverse@^7.12.9", "@babel/traverse@^7.13.0", "@babel/traverse@^7.18.6", "@babel/traverse@^7.18.8", "@babel/traverse@^7.21.0", "@babel/traverse@^7.21.2", "@babel/traverse@^7.21.4": - version "7.25.4" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.25.4.tgz#648678046990f2957407e3086e97044f13c3e18e" - integrity sha512-VJ4XsrD+nOvlXyLzmLzUs/0qjFS4sK30te5yEFlvbbUNEgKaVb2BHZUpAL+ttLPQAHNrsI3zZisbfha5Cvr8vg== - dependencies: - "@babel/code-frame" "^7.24.7" - "@babel/generator" "^7.25.4" - "@babel/parser" "^7.25.4" - "@babel/template" "^7.25.0" - "@babel/types" "^7.25.4" +"@babel/runtime@^7.12.5", "@babel/runtime@^7.18.3", "@babel/runtime@^7.23.5", "@babel/runtime@^7.25.9": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.26.0.tgz#8600c2f595f277c60815256418b85356a65173c1" + integrity sha512-FDSOghenHTiToteC/QRlv2q3DhPZ/oOXTBoirfWNx1Cx3TMVcGWQtMMmQcSvb/JjpNeGzx8Pq/b4fKEJuWm1sw== + dependencies: + regenerator-runtime "^0.14.0" + +"@babel/template@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.25.9.tgz#ecb62d81a8a6f5dc5fe8abfc3901fc52ddf15016" + integrity sha512-9DGttpmPvIxBb/2uwpVo3dqJ+O6RooAFOS+lB+xDqoE2PVCE8nfoHMdZLpfCQRLwvohzXISPZcgxt80xLfsuwg== + dependencies: + "@babel/code-frame" "^7.25.9" + "@babel/parser" "^7.25.9" + "@babel/types" "^7.25.9" + +"@babel/traverse@^7.25.9": + version "7.25.9" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.25.9.tgz#a50f8fe49e7f69f53de5bea7e413cd35c5e13c84" + integrity sha512-ZCuvfwOwlz/bawvAuvcj8rrithP2/N55Tzz342AkTvq4qaWbGfmCk/tKhNaV2cthijKrPAA8SRJV5WWe7IBMJw== + dependencies: + "@babel/code-frame" "^7.25.9" + 
"@babel/generator" "^7.25.9" + "@babel/parser" "^7.25.9" + "@babel/template" "^7.25.9" + "@babel/types" "^7.25.9" debug "^4.3.1" globals "^11.1.0" -"@babel/types@^7.12.7", "@babel/types@^7.15.6", "@babel/types@^7.18.6", "@babel/types@^7.20.2", "@babel/types@^7.20.7", "@babel/types@^7.21.0", "@babel/types@^7.21.2", "@babel/types@^7.21.4", "@babel/types@^7.4.4": +"@babel/types@^7.18.6", "@babel/types@^7.21.3", "@babel/types@^7.25.9", "@babel/types@^7.26.0": + version "7.26.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.26.0.tgz#deabd08d6b753bc8e0f198f8709fb575e31774ff" + integrity sha512-Z/yiTPj+lDVnF7lWeKCIJzaIkI0vYO87dMpZ4bg4TDrFe4XXLFWL1TbXU27gBP3QccxV9mZICCrnjnYlJjXHOA== + dependencies: + "@babel/helper-string-parser" "^7.25.9" + "@babel/helper-validator-identifier" "^7.25.9" + +"@babel/types@^7.4.4": version "7.21.4" resolved "https://registry.npmjs.org/@babel/types/-/types-7.21.4.tgz" integrity sha512-rU2oY501qDxE8Pyo7i/Orqma4ziCOrby0/9mvbDUGEfvZjb279Nk9k19e2fiCxHbRRpY2ZyrgW1eq22mvmOIzA== @@ -1260,456 +1335,483 @@ "@babel/helper-validator-identifier" "^7.19.1" to-fast-properties "^2.0.0" -"@babel/types@^7.25.0", "@babel/types@^7.25.4": - version "7.25.4" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.25.4.tgz#6bcb46c72fdf1012a209d016c07f769e10adcb5f" - integrity sha512-zQ1ijeeCXVEh+aNL0RlmkPkG8HUiDcU2pzQQFjtbntgAczRASFzj4H+6+bV+dy1ntKR14I/DypeuRG1uma98iQ== - dependencies: - "@babel/helper-string-parser" "^7.24.8" - "@babel/helper-validator-identifier" "^7.24.7" - to-fast-properties "^2.0.0" - "@colors/colors@1.5.0": version "1.5.0" resolved "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz" integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ== -"@docsearch/css@3.1.1": - version "3.1.1" - resolved "https://registry.npmjs.org/@docsearch/css/-/css-3.1.1.tgz" - integrity sha512-utLgg7E1agqQeqCJn05DWC7XXMk4tMUUnL7MZupcknRu2OzGN13qwey2qA/0NAKkVBGugiWtON0+rlU0QIPojg== +"@discoveryjs/json-ext@0.5.7": + version "0.5.7" + resolved "https://registry.yarnpkg.com/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz#1d572bfbbe14b7704e0ba0f39b74815b84870d70" + integrity sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw== -"@docsearch/react@^3.1.1": - version "3.1.1" - resolved "https://registry.npmjs.org/@docsearch/react/-/react-3.1.1.tgz" - integrity sha512-cfoql4qvtsVRqBMYxhlGNpvyy/KlCoPqjIsJSZYqYf9AplZncKjLBTcwBu6RXFMVCe30cIFljniI4OjqAU67pQ== +"@docsearch/css@3.6.3": + version "3.6.3" + resolved "https://registry.yarnpkg.com/@docsearch/css/-/css-3.6.3.tgz#d787cc9d27a7e67305fa47d668656eb2e64c4526" + integrity sha512-3uvbg8E7rhqE1C4oBAK3tGlS2qfhi9zpfZgH/yjDPF73vd9B41urVIKujF4rczcF4E3qs34SedhehiDJ4UdNBA== + +"@docsearch/react@^3.5.2": + version "3.6.3" + resolved "https://registry.yarnpkg.com/@docsearch/react/-/react-3.6.3.tgz#326a0811306060bfb481df3cd0db51adaa9f737c" + integrity sha512-2munr4uBuZq1PG+Ge+F+ldIdxb3Wi8OmEIv2tQQb4RvEvvph+xtQkxwHzVIEnt5s+HecwucuXwB+3JhcZboFLg== dependencies: - "@algolia/autocomplete-core" "1.7.1" - "@algolia/autocomplete-preset-algolia" "1.7.1" - "@docsearch/css" "3.1.1" - algoliasearch "^4.0.0" + "@algolia/autocomplete-core" "1.9.3" + "@algolia/autocomplete-preset-algolia" "1.17.6" + "@docsearch/css" "3.6.3" + algoliasearch "^5.11.0" -"@docusaurus/core@2.4.0", "@docusaurus/core@^2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/core/-/core-2.4.0.tgz" - integrity 
sha512-J55/WEoIpRcLf3afO5POHPguVZosKmJEQWKBL+K7TAnfuE7i+Y0NPLlkKtnWCehagGsgTqClfQEexH/UT4kELA== +"@docusaurus/babel@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/babel/-/babel-3.6.0.tgz#735a003207925bd782dd08ffa5d8b3503c1f8d72" + integrity sha512-7CsoQFiadoq7AHSUIQNkI/lGfg9AQ2ZBzsf9BqfZGXkHwWDy6twuohEaG0PgQv1npSRSAB2dioVxhRSErnqKNA== dependencies: - "@babel/core" "^7.18.6" - "@babel/generator" "^7.18.7" + "@babel/core" "^7.25.9" + "@babel/generator" "^7.25.9" "@babel/plugin-syntax-dynamic-import" "^7.8.3" - "@babel/plugin-transform-runtime" "^7.18.6" - "@babel/preset-env" "^7.18.6" - "@babel/preset-react" "^7.18.6" - "@babel/preset-typescript" "^7.18.6" - "@babel/runtime" "^7.18.6" - "@babel/runtime-corejs3" "^7.18.6" - "@babel/traverse" "^7.18.8" - "@docusaurus/cssnano-preset" "2.4.0" - "@docusaurus/logger" "2.4.0" - "@docusaurus/mdx-loader" "2.4.0" - "@docusaurus/react-loadable" "5.5.2" - "@docusaurus/utils" "2.4.0" - "@docusaurus/utils-common" "2.4.0" - "@docusaurus/utils-validation" "2.4.0" - "@slorber/static-site-generator-webpack-plugin" "^4.0.7" - "@svgr/webpack" "^6.2.1" - autoprefixer "^10.4.7" - babel-loader "^8.2.5" + "@babel/plugin-transform-runtime" "^7.25.9" + "@babel/preset-env" "^7.25.9" + "@babel/preset-react" "^7.25.9" + "@babel/preset-typescript" "^7.25.9" + "@babel/runtime" "^7.25.9" + "@babel/runtime-corejs3" "^7.25.9" + "@babel/traverse" "^7.25.9" + "@docusaurus/logger" "3.6.0" + "@docusaurus/utils" "3.6.0" babel-plugin-dynamic-import-node "^2.3.3" + fs-extra "^11.1.1" + tslib "^2.6.0" + +"@docusaurus/bundler@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/bundler/-/bundler-3.6.0.tgz#bdd060ba4d009211348e4e973a3bf4861cf0996b" + integrity sha512-o5T9HXkPKH0OQAifTxEXaebcO8kaz3tU1+wlIShZ2DKJHlsyWX3N4rToWBHroWnV/ZCT2XN3kLRzXASqrnb9Tw== + dependencies: + "@babel/core" "^7.25.9" + "@docusaurus/babel" "3.6.0" + "@docusaurus/cssnano-preset" "3.6.0" + "@docusaurus/logger" "3.6.0" + "@docusaurus/types" "3.6.0" + "@docusaurus/utils" "3.6.0" + autoprefixer "^10.4.14" + babel-loader "^9.2.1" + clean-css "^5.3.2" + copy-webpack-plugin "^11.0.0" + css-loader "^6.8.1" + css-minimizer-webpack-plugin "^5.0.1" + cssnano "^6.1.2" + file-loader "^6.2.0" + html-minifier-terser "^7.2.0" + mini-css-extract-plugin "^2.9.1" + null-loader "^4.0.1" + postcss "^8.4.26" + postcss-loader "^7.3.3" + react-dev-utils "^12.0.1" + terser-webpack-plugin "^5.3.9" + tslib "^2.6.0" + url-loader "^4.1.1" + webpack "^5.95.0" + webpackbar "^6.0.1" + +"@docusaurus/core@3.6.0", "@docusaurus/core@^3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/core/-/core-3.6.0.tgz#b23fc7e253a49cc3e5ac9e091354f497cc0b101b" + integrity sha512-lvRgMoKJJSRDt9+HhAqFcICV4kp/mw1cJJrLxIw4Q2XZnFGM1XUuwcbuaqWmGog+NcOLZaPCcCtZbn60EMCtjQ== + dependencies: + "@docusaurus/babel" "3.6.0" + "@docusaurus/bundler" "3.6.0" + "@docusaurus/logger" "3.6.0" + "@docusaurus/mdx-loader" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-common" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" boxen "^6.2.1" chalk "^4.1.2" chokidar "^3.5.3" - clean-css "^5.3.0" - cli-table3 "^0.6.2" + cli-table3 "^0.6.3" combine-promises "^1.1.0" commander "^5.1.0" - copy-webpack-plugin "^11.0.0" - core-js "^3.23.3" - css-loader "^6.7.1" - css-minimizer-webpack-plugin "^4.0.0" - cssnano "^5.1.12" + core-js "^3.31.1" del "^6.1.1" - detect-port "^1.3.0" + detect-port "^1.5.1" escape-html "^1.0.3" - eta "^2.0.0" - file-loader "^6.2.0" - fs-extra "^10.1.0" - 
html-minifier-terser "^6.1.0" - html-tags "^3.2.0" - html-webpack-plugin "^5.5.0" - import-fresh "^3.3.0" + eta "^2.2.0" + eval "^0.1.8" + fs-extra "^11.1.1" + html-tags "^3.3.1" + html-webpack-plugin "^5.6.0" leven "^3.1.0" lodash "^4.17.21" - mini-css-extract-plugin "^2.6.1" - postcss "^8.4.14" - postcss-loader "^7.0.0" + p-map "^4.0.0" prompts "^2.4.2" react-dev-utils "^12.0.1" react-helmet-async "^1.3.0" - react-loadable "npm:@docusaurus/react-loadable@5.5.2" + react-loadable "npm:@docusaurus/react-loadable@6.0.0" react-loadable-ssr-addon-v5-slorber "^1.0.1" - react-router "^5.3.3" + react-router "^5.3.4" react-router-config "^5.1.1" - react-router-dom "^5.3.3" + react-router-dom "^5.3.4" rtl-detect "^1.0.4" - semver "^7.3.7" - serve-handler "^6.1.3" + semver "^7.5.4" + serve-handler "^6.1.6" shelljs "^0.8.5" - terser-webpack-plugin "^5.3.3" - tslib "^2.4.0" - update-notifier "^5.1.0" - url-loader "^4.1.1" - wait-on "^6.0.1" - webpack "^5.73.0" - webpack-bundle-analyzer "^4.5.0" - webpack-dev-server "^4.9.3" - webpack-merge "^5.8.0" - webpackbar "^5.0.2" - -"@docusaurus/cssnano-preset@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/cssnano-preset/-/cssnano-preset-2.4.0.tgz" - integrity sha512-RmdiA3IpsLgZGXRzqnmTbGv43W4OD44PCo+6Q/aYjEM2V57vKCVqNzuafE94jv0z/PjHoXUrjr69SaRymBKYYw== + tslib "^2.6.0" + update-notifier "^6.0.2" + webpack "^5.95.0" + webpack-bundle-analyzer "^4.10.2" + webpack-dev-server "^4.15.2" + webpack-merge "^6.0.1" + +"@docusaurus/cssnano-preset@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/cssnano-preset/-/cssnano-preset-3.6.0.tgz#02378e53e9568ed5fc8871d4fc158ea96fd7421c" + integrity sha512-h3jlOXqqzNSoU+C4CZLNpFtD+v2xr1UBf4idZpwMgqid9r6lb5GS7tWKnQnauio6OipacbHbDXEX3JyT1PlDkg== dependencies: - cssnano-preset-advanced "^5.3.8" - postcss "^8.4.14" - postcss-sort-media-queries "^4.2.1" - tslib "^2.4.0" + cssnano-preset-advanced "^6.1.2" + postcss "^8.4.38" + postcss-sort-media-queries "^5.2.0" + tslib "^2.6.0" -"@docusaurus/logger@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/logger/-/logger-2.4.0.tgz" - integrity sha512-T8+qR4APN+MjcC9yL2Es+xPJ2923S9hpzDmMtdsOcUGLqpCGBbU1vp3AAqDwXtVgFkq+NsEk7sHdVsfLWR/AXw== +"@docusaurus/logger@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/logger/-/logger-3.6.0.tgz#c7349c2636087f55f573a60a3c7f69b87d59974d" + integrity sha512-BcQhoXilXW0607cH/kO6P5Gt5KxCGfoJ+QDKNf3yO2S09/RsITlW+0QljXPbI3DklTrHrhRDmgGk1yX4nUhWTA== dependencies: chalk "^4.1.2" - tslib "^2.4.0" + tslib "^2.6.0" -"@docusaurus/mdx-loader@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/mdx-loader/-/mdx-loader-2.4.0.tgz" - integrity sha512-GWoH4izZKOmFoC+gbI2/y8deH/xKLvzz/T5BsEexBye8EHQlwsA7FMrVa48N063bJBH4FUOiRRXxk5rq9cC36g== - dependencies: - "@babel/parser" "^7.18.8" - "@babel/traverse" "^7.18.8" - "@docusaurus/logger" "2.4.0" - "@docusaurus/utils" "2.4.0" - "@mdx-js/mdx" "^1.6.22" +"@docusaurus/mdx-loader@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/mdx-loader/-/mdx-loader-3.6.0.tgz#f8ba7af9d59473a7182f6a9307e0432f8dce905b" + integrity sha512-GhRzL1Af/AdSSrGesSPOU/iP/aXadTGmVKuysCxZDrQR2RtBtubQZ9aw+KvdFVV7R4K/CsbgD6J5oqrXlEPk3Q== + dependencies: + "@docusaurus/logger" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + "@mdx-js/mdx" "^3.0.0" + "@slorber/remark-comment" "^1.0.0" escape-html "^1.0.3" + estree-util-value-to-estree "^3.0.1" file-loader "^6.2.0" - 
fs-extra "^10.1.0" - image-size "^1.0.1" - mdast-util-to-string "^2.0.0" - remark-emoji "^2.2.0" + fs-extra "^11.1.1" + image-size "^1.0.2" + mdast-util-mdx "^3.0.0" + mdast-util-to-string "^4.0.0" + rehype-raw "^7.0.0" + remark-directive "^3.0.0" + remark-emoji "^4.0.0" + remark-frontmatter "^5.0.0" + remark-gfm "^4.0.0" stringify-object "^3.3.0" - tslib "^2.4.0" - unified "^9.2.2" - unist-util-visit "^2.0.3" + tslib "^2.6.0" + unified "^11.0.3" + unist-util-visit "^5.0.0" url-loader "^4.1.1" - webpack "^5.73.0" + vfile "^6.0.1" + webpack "^5.88.1" -"@docusaurus/module-type-aliases@2.4.0", "@docusaurus/module-type-aliases@^2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/module-type-aliases/-/module-type-aliases-2.4.0.tgz" - integrity sha512-YEQO2D3UXs72qCn8Cr+RlycSQXVGN9iEUyuHwTuK4/uL/HFomB2FHSU0vSDM23oLd+X/KibQ3Ez6nGjQLqXcHg== +"@docusaurus/module-type-aliases@3.6.0", "@docusaurus/module-type-aliases@^3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/module-type-aliases/-/module-type-aliases-3.6.0.tgz#44083c34a53db1dde06364b4e7f2d144fa2d5394" + integrity sha512-szTrIN/6/fuk0xkf3XbRfdTFJzRQ8d1s3sQj5++58wltrT7v3yn1149oc9ryYjMpRcbsarGloQwMu7ofPe4XPg== dependencies: - "@docusaurus/react-loadable" "5.5.2" - "@docusaurus/types" "2.4.0" + "@docusaurus/types" "3.6.0" "@types/history" "^4.7.11" "@types/react" "*" "@types/react-router-config" "*" "@types/react-router-dom" "*" react-helmet-async "*" - react-loadable "npm:@docusaurus/react-loadable@5.5.2" + react-loadable "npm:@docusaurus/react-loadable@6.0.0" -"@docusaurus/plugin-content-blog@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/plugin-content-blog/-/plugin-content-blog-2.4.0.tgz" - integrity sha512-YwkAkVUxtxoBAIj/MCb4ohN0SCtHBs4AS75jMhPpf67qf3j+U/4n33cELq7567hwyZ6fMz2GPJcVmctzlGGThQ== - dependencies: - "@docusaurus/core" "2.4.0" - "@docusaurus/logger" "2.4.0" - "@docusaurus/mdx-loader" "2.4.0" - "@docusaurus/types" "2.4.0" - "@docusaurus/utils" "2.4.0" - "@docusaurus/utils-common" "2.4.0" - "@docusaurus/utils-validation" "2.4.0" - cheerio "^1.0.0-rc.12" +"@docusaurus/plugin-content-blog@3.6.0", "@docusaurus/plugin-content-blog@^3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-blog/-/plugin-content-blog-3.6.0.tgz#9128175b4c3ce885d9090183d74c60813844ea8d" + integrity sha512-o4aT1/E0Ldpzs/hQff5uyoSriAhS/yqBhqSn+fvSw465AaqRsva6O7CZSYleuBq6x2bewyE3QJq2PcTiHhAd8g== + dependencies: + "@docusaurus/core" "3.6.0" + "@docusaurus/logger" "3.6.0" + "@docusaurus/mdx-loader" "3.6.0" + "@docusaurus/theme-common" "3.6.0" + "@docusaurus/types" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-common" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + cheerio "1.0.0-rc.12" feed "^4.2.2" - fs-extra "^10.1.0" + fs-extra "^11.1.1" lodash "^4.17.21" reading-time "^1.5.0" - tslib "^2.4.0" - unist-util-visit "^2.0.3" + srcset "^4.0.0" + tslib "^2.6.0" + unist-util-visit "^5.0.0" utility-types "^3.10.0" - webpack "^5.73.0" + webpack "^5.88.1" -"@docusaurus/plugin-content-docs@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/plugin-content-docs/-/plugin-content-docs-2.4.0.tgz" - integrity sha512-ic/Z/ZN5Rk/RQo+Io6rUGpToOtNbtPloMR2JcGwC1xT2riMu6zzfSwmBi9tHJgdXH6CB5jG+0dOZZO8QS5tmDg== - dependencies: - "@docusaurus/core" "2.4.0" - "@docusaurus/logger" "2.4.0" - "@docusaurus/mdx-loader" "2.4.0" - "@docusaurus/module-type-aliases" "2.4.0" - "@docusaurus/types" "2.4.0" - "@docusaurus/utils" "2.4.0" - 
"@docusaurus/utils-validation" "2.4.0" - "@types/react-router-config" "^5.0.6" +"@docusaurus/plugin-content-docs@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-docs/-/plugin-content-docs-3.6.0.tgz#15cae4bf81da0b0ddce09d53b10b7209116ea9c2" + integrity sha512-c5gZOxocJKO/Zev2MEZInli+b+VNswDGuKHE6QtFgidhAJonwjh2kwj967RvWFaMMk62HlLJLZ+IGK2XsVy4Aw== + dependencies: + "@docusaurus/core" "3.6.0" + "@docusaurus/logger" "3.6.0" + "@docusaurus/mdx-loader" "3.6.0" + "@docusaurus/module-type-aliases" "3.6.0" + "@docusaurus/theme-common" "3.6.0" + "@docusaurus/types" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-common" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + "@types/react-router-config" "^5.0.7" combine-promises "^1.1.0" - fs-extra "^10.1.0" - import-fresh "^3.3.0" + fs-extra "^11.1.1" js-yaml "^4.1.0" lodash "^4.17.21" - tslib "^2.4.0" + tslib "^2.6.0" utility-types "^3.10.0" - webpack "^5.73.0" + webpack "^5.88.1" -"@docusaurus/plugin-content-pages@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/plugin-content-pages/-/plugin-content-pages-2.4.0.tgz" - integrity sha512-Pk2pOeOxk8MeU3mrTU0XLIgP9NZixbdcJmJ7RUFrZp1Aj42nd0RhIT14BGvXXyqb8yTQlk4DmYGAzqOfBsFyGw== - dependencies: - "@docusaurus/core" "2.4.0" - "@docusaurus/mdx-loader" "2.4.0" - "@docusaurus/types" "2.4.0" - "@docusaurus/utils" "2.4.0" - "@docusaurus/utils-validation" "2.4.0" - fs-extra "^10.1.0" - tslib "^2.4.0" - webpack "^5.73.0" - -"@docusaurus/plugin-debug@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/plugin-debug/-/plugin-debug-2.4.0.tgz" - integrity sha512-KC56DdYjYT7Txyux71vXHXGYZuP6yYtqwClvYpjKreWIHWus5Zt6VNi23rMZv3/QKhOCrN64zplUbdfQMvddBQ== +"@docusaurus/plugin-content-pages@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-content-pages/-/plugin-content-pages-3.6.0.tgz#5dd284bf063baaba1e0305c90b1dd0d5acc7e466" + integrity sha512-RKHhJrfkadHc7+tt1cP48NWifOrhkSRMPdXNYytzhoQrXlP6Ph+3tfQ4/n+nT0S3Y9+wwRxYqRqA380ZLt+QtQ== + dependencies: + "@docusaurus/core" "3.6.0" + "@docusaurus/mdx-loader" "3.6.0" + "@docusaurus/types" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + fs-extra "^11.1.1" + tslib "^2.6.0" + webpack "^5.88.1" + +"@docusaurus/plugin-debug@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-debug/-/plugin-debug-3.6.0.tgz#0a6da9ba31a0acb176ae2762b4d6b96b1906c826" + integrity sha512-o8T1Rl94COLdSlKvjYLQpRJQRU8WWZ8EX1B0yV0dQLNN8reyH7MQW+6z1ig4sQFfH3pnjPWVGHfuEjcib5m7Eg== dependencies: - "@docusaurus/core" "2.4.0" - "@docusaurus/types" "2.4.0" - "@docusaurus/utils" "2.4.0" - fs-extra "^10.1.0" - react-json-view "^1.21.3" - tslib "^2.4.0" + "@docusaurus/core" "3.6.0" + "@docusaurus/types" "3.6.0" + "@docusaurus/utils" "3.6.0" + fs-extra "^11.1.1" + react-json-view-lite "^1.2.0" + tslib "^2.6.0" -"@docusaurus/plugin-google-analytics@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-2.4.0.tgz" - integrity sha512-uGUzX67DOAIglygdNrmMOvEp8qG03X20jMWadeqVQktS6nADvozpSLGx4J0xbkblhJkUzN21WiilsP9iVP+zkw== +"@docusaurus/plugin-google-analytics@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-analytics/-/plugin-google-analytics-3.6.0.tgz#9e8245eef1bee95e44ef2af92ce3e844a8e93e64" + integrity sha512-kgRFbfpi6Hshj75YUztKyEMtI/kw0trPRwoTN4g+W1NK99R/vh8phTvhBTIMnDbetU79795LkwfG0rZ/ce6zWQ== 
dependencies: - "@docusaurus/core" "2.4.0" - "@docusaurus/types" "2.4.0" - "@docusaurus/utils-validation" "2.4.0" - tslib "^2.4.0" + "@docusaurus/core" "3.6.0" + "@docusaurus/types" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + tslib "^2.6.0" -"@docusaurus/plugin-google-gtag@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-2.4.0.tgz" - integrity sha512-adj/70DANaQs2+TF/nRdMezDXFAV/O/pjAbUgmKBlyOTq5qoMe0Tk4muvQIwWUmiUQxFJe+sKlZGM771ownyOg== +"@docusaurus/plugin-google-gtag@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-gtag/-/plugin-google-gtag-3.6.0.tgz#bed8381fe3ab357d56a565f657e38d8ea6272703" + integrity sha512-nqu4IfjaO4UX+dojHL2BxHRS+sKj31CIMWYo49huQ3wTET0Oc3u/WGTaKd3ShTPDhkgiRhTOSTPUwJWrU55nHg== dependencies: - "@docusaurus/core" "2.4.0" - "@docusaurus/types" "2.4.0" - "@docusaurus/utils-validation" "2.4.0" - tslib "^2.4.0" + "@docusaurus/core" "3.6.0" + "@docusaurus/types" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + "@types/gtag.js" "^0.0.12" + tslib "^2.6.0" -"@docusaurus/plugin-google-tag-manager@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-2.4.0.tgz" - integrity sha512-E66uGcYs4l7yitmp/8kMEVQftFPwV9iC62ORh47Veqzs6ExwnhzBkJmwDnwIysHBF1vlxnzET0Fl2LfL5fRR3A== +"@docusaurus/plugin-google-tag-manager@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-google-tag-manager/-/plugin-google-tag-manager-3.6.0.tgz#326382de05888ea4317837be736eabd635adbc71" + integrity sha512-OU6c5xI0nOVbEc9eImGvvsgNWe4vGm97t/W3aLHjWsHyNk3uwFNBQMHRvBUwAi9k/K3kyC5E7DWnc67REhdLOw== dependencies: - "@docusaurus/core" "2.4.0" - "@docusaurus/types" "2.4.0" - "@docusaurus/utils-validation" "2.4.0" - tslib "^2.4.0" + "@docusaurus/core" "3.6.0" + "@docusaurus/types" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + tslib "^2.6.0" -"@docusaurus/plugin-sitemap@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/plugin-sitemap/-/plugin-sitemap-2.4.0.tgz" - integrity sha512-pZxh+ygfnI657sN8a/FkYVIAmVv0CGk71QMKqJBOfMmDHNN1FeDeFkBjWP49ejBqpqAhjufkv5UWq3UOu2soCw== - dependencies: - "@docusaurus/core" "2.4.0" - "@docusaurus/logger" "2.4.0" - "@docusaurus/types" "2.4.0" - "@docusaurus/utils" "2.4.0" - "@docusaurus/utils-common" "2.4.0" - "@docusaurus/utils-validation" "2.4.0" - fs-extra "^10.1.0" +"@docusaurus/plugin-sitemap@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/plugin-sitemap/-/plugin-sitemap-3.6.0.tgz#c7c93f75f03391ca9071da48563fc4faa84966bc" + integrity sha512-YB5XMdf9FjLhgbHY/cDbYhVxsgcpPIjxY9769HUgFOB7GVzItTLOR71W035R1BiR2CA5QAn3XOSg36WLRxlhQQ== + dependencies: + "@docusaurus/core" "3.6.0" + "@docusaurus/logger" "3.6.0" + "@docusaurus/types" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-common" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + fs-extra "^11.1.1" sitemap "^7.1.1" - tslib "^2.4.0" - -"@docusaurus/preset-classic@^2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/preset-classic/-/preset-classic-2.4.0.tgz" - integrity sha512-/5z5o/9bc6+P5ool2y01PbJhoGddEGsC0ej1MF6mCoazk8A+kW4feoUd68l7Bnv01rCnG3xy7kHUQP97Y0grUA== - dependencies: - "@docusaurus/core" "2.4.0" - "@docusaurus/plugin-content-blog" "2.4.0" - "@docusaurus/plugin-content-docs" "2.4.0" - "@docusaurus/plugin-content-pages" "2.4.0" - "@docusaurus/plugin-debug" "2.4.0" - "@docusaurus/plugin-google-analytics" 
"2.4.0" - "@docusaurus/plugin-google-gtag" "2.4.0" - "@docusaurus/plugin-google-tag-manager" "2.4.0" - "@docusaurus/plugin-sitemap" "2.4.0" - "@docusaurus/theme-classic" "2.4.0" - "@docusaurus/theme-common" "2.4.0" - "@docusaurus/theme-search-algolia" "2.4.0" - "@docusaurus/types" "2.4.0" - -"@docusaurus/react-loadable@5.5.2": - version "5.5.2" - resolved "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz" - integrity sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ== - dependencies: - "@types/react" "*" - prop-types "^15.6.2" + tslib "^2.6.0" -"@docusaurus/theme-classic@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/theme-classic/-/theme-classic-2.4.0.tgz" - integrity sha512-GMDX5WU6Z0OC65eQFgl3iNNEbI9IMJz9f6KnOyuMxNUR6q0qVLsKCNopFUDfFNJ55UU50o7P7o21yVhkwpfJ9w== - dependencies: - "@docusaurus/core" "2.4.0" - "@docusaurus/mdx-loader" "2.4.0" - "@docusaurus/module-type-aliases" "2.4.0" - "@docusaurus/plugin-content-blog" "2.4.0" - "@docusaurus/plugin-content-docs" "2.4.0" - "@docusaurus/plugin-content-pages" "2.4.0" - "@docusaurus/theme-common" "2.4.0" - "@docusaurus/theme-translations" "2.4.0" - "@docusaurus/types" "2.4.0" - "@docusaurus/utils" "2.4.0" - "@docusaurus/utils-common" "2.4.0" - "@docusaurus/utils-validation" "2.4.0" - "@mdx-js/react" "^1.6.22" - clsx "^1.2.1" - copy-text-to-clipboard "^3.0.1" - infima "0.2.0-alpha.43" +"@docusaurus/preset-classic@^3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/preset-classic/-/preset-classic-3.6.0.tgz#71561f366a266be571022764eb8b9e5618f573eb" + integrity sha512-kpGNdQzr/Dpm7o3b1iaQrz4DMDx3WIeBbl4V4P4maa2zAQkTdlaP4CMgA5oKrRrpqPLnQFsUM/b+qf2glhl2Tw== + dependencies: + "@docusaurus/core" "3.6.0" + "@docusaurus/plugin-content-blog" "3.6.0" + "@docusaurus/plugin-content-docs" "3.6.0" + "@docusaurus/plugin-content-pages" "3.6.0" + "@docusaurus/plugin-debug" "3.6.0" + "@docusaurus/plugin-google-analytics" "3.6.0" + "@docusaurus/plugin-google-gtag" "3.6.0" + "@docusaurus/plugin-google-tag-manager" "3.6.0" + "@docusaurus/plugin-sitemap" "3.6.0" + "@docusaurus/theme-classic" "3.6.0" + "@docusaurus/theme-common" "3.6.0" + "@docusaurus/theme-search-algolia" "3.6.0" + "@docusaurus/types" "3.6.0" + +"@docusaurus/theme-classic@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-classic/-/theme-classic-3.6.0.tgz#8f34b65c85f5082deb3633a893974d2eee309121" + integrity sha512-sAXNfwPL6uRD+BuHuKXZfAXud7SS7IK/JdrPuzyQxdO1gJKzI5GFfe1ED1QoJDNWJWJ01JHE5rSnwYLEADc2rQ== + dependencies: + "@docusaurus/core" "3.6.0" + "@docusaurus/logger" "3.6.0" + "@docusaurus/mdx-loader" "3.6.0" + "@docusaurus/module-type-aliases" "3.6.0" + "@docusaurus/plugin-content-blog" "3.6.0" + "@docusaurus/plugin-content-docs" "3.6.0" + "@docusaurus/plugin-content-pages" "3.6.0" + "@docusaurus/theme-common" "3.6.0" + "@docusaurus/theme-translations" "3.6.0" + "@docusaurus/types" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-common" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + "@mdx-js/react" "^3.0.0" + clsx "^2.0.0" + copy-text-to-clipboard "^3.2.0" + infima "0.2.0-alpha.45" lodash "^4.17.21" nprogress "^0.2.0" - postcss "^8.4.14" - prism-react-renderer "^1.3.5" - prismjs "^1.28.0" - react-router-dom "^5.3.3" - rtlcss "^3.5.0" - tslib "^2.4.0" + postcss "^8.4.26" + prism-react-renderer "^2.3.0" + prismjs "^1.29.0" + react-router-dom "^5.3.4" + rtlcss "^4.1.0" + tslib "^2.6.0" utility-types "^3.10.0" 
-"@docusaurus/theme-common@2.4.0", "@docusaurus/theme-common@^2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/theme-common/-/theme-common-2.4.0.tgz" - integrity sha512-IkG/l5f/FLY6cBIxtPmFnxpuPzc5TupuqlOx+XDN+035MdQcAh8wHXXZJAkTeYDeZ3anIUSUIvWa7/nRKoQEfg== - dependencies: - "@docusaurus/mdx-loader" "2.4.0" - "@docusaurus/module-type-aliases" "2.4.0" - "@docusaurus/plugin-content-blog" "2.4.0" - "@docusaurus/plugin-content-docs" "2.4.0" - "@docusaurus/plugin-content-pages" "2.4.0" - "@docusaurus/utils" "2.4.0" - "@docusaurus/utils-common" "2.4.0" +"@docusaurus/theme-common@3.6.0", "@docusaurus/theme-common@^3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-common/-/theme-common-3.6.0.tgz#9a061d278df76da0f70a9465cd0b7299c14d03d3" + integrity sha512-frjlYE5sRs+GuPs4XXlp9aMLI2O4H5FPpznDAXBrCm+8EpWRiIb443ePMxM3IyMCQ5bwFlki0PI9C+r4apstnw== + dependencies: + "@docusaurus/mdx-loader" "3.6.0" + "@docusaurus/module-type-aliases" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-common" "3.6.0" "@types/history" "^4.7.11" "@types/react" "*" "@types/react-router-config" "*" - clsx "^1.2.1" + clsx "^2.0.0" parse-numeric-range "^1.3.0" - prism-react-renderer "^1.3.5" - tslib "^2.4.0" - use-sync-external-store "^1.2.0" + prism-react-renderer "^2.3.0" + tslib "^2.6.0" utility-types "^3.10.0" -"@docusaurus/theme-search-algolia@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-2.4.0.tgz" - integrity sha512-pPCJSCL1Qt4pu/Z0uxBAuke0yEBbxh0s4fOvimna7TEcBLPq0x06/K78AaABXrTVQM6S0vdocFl9EoNgU17hqA== - dependencies: - "@docsearch/react" "^3.1.1" - "@docusaurus/core" "2.4.0" - "@docusaurus/logger" "2.4.0" - "@docusaurus/plugin-content-docs" "2.4.0" - "@docusaurus/theme-common" "2.4.0" - "@docusaurus/theme-translations" "2.4.0" - "@docusaurus/utils" "2.4.0" - "@docusaurus/utils-validation" "2.4.0" - algoliasearch "^4.13.1" - algoliasearch-helper "^3.10.0" - clsx "^1.2.1" - eta "^2.0.0" - fs-extra "^10.1.0" +"@docusaurus/theme-search-algolia@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.6.0.tgz#47dcfca68f50163abce411dd9b181855a9ec9c83" + integrity sha512-4IwRUkxjrisR8LXBHeE4d2btraWdMficbgiVL3UHvJURmyvgzMBZQP8KrK8rjdXeu8SuRxSmeV6NSVomRvdbEg== + dependencies: + "@docsearch/react" "^3.5.2" + "@docusaurus/core" "3.6.0" + "@docusaurus/logger" "3.6.0" + "@docusaurus/plugin-content-docs" "3.6.0" + "@docusaurus/theme-common" "3.6.0" + "@docusaurus/theme-translations" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-validation" "3.6.0" + algoliasearch "^4.18.0" + algoliasearch-helper "^3.13.3" + clsx "^2.0.0" + eta "^2.2.0" + fs-extra "^11.1.1" lodash "^4.17.21" - tslib "^2.4.0" + tslib "^2.6.0" utility-types "^3.10.0" -"@docusaurus/theme-translations@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/theme-translations/-/theme-translations-2.4.0.tgz" - integrity sha512-kEoITnPXzDPUMBHk3+fzEzbopxLD3fR5sDoayNH0vXkpUukA88/aDL1bqkhxWZHA3LOfJ3f0vJbOwmnXW5v85Q== +"@docusaurus/theme-translations@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/theme-translations/-/theme-translations-3.6.0.tgz#93994e931f340c1712c81ac80dbab5750c24634f" + integrity sha512-L555X8lWE3fv8VaF0Bc1VnAgi10UvRKFcvADHiYR7Gj37ItaWP5i7xLHsSw7fi/SHTXe5wfIeCFNqUYHyCOHAQ== dependencies: - fs-extra "^10.1.0" - tslib "^2.4.0" + fs-extra "^11.1.1" + tslib "^2.6.0" 
-"@docusaurus/types@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/types/-/types-2.4.0.tgz" - integrity sha512-xaBXr+KIPDkIaef06c+i2HeTqVNixB7yFut5fBXPGI2f1rrmEV2vLMznNGsFwvZ5XmA3Quuefd4OGRkdo97Dhw== +"@docusaurus/types@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/types/-/types-3.6.0.tgz#8fa82332a7c7b8093b5c55e1115f5854ce484978" + integrity sha512-jADLgoZGWhAzThr+mRiyuFD4OUzt6jHnb7NRArRKorgxckqUBaPyFOau9hhbcSTHtU6ceyeWjN7FDt7uG2Hplw== dependencies: + "@mdx-js/mdx" "^3.0.0" "@types/history" "^4.7.11" "@types/react" "*" commander "^5.1.0" - joi "^17.6.0" + joi "^17.9.2" react-helmet-async "^1.3.0" utility-types "^3.10.0" - webpack "^5.73.0" - webpack-merge "^5.8.0" + webpack "^5.95.0" + webpack-merge "^5.9.0" -"@docusaurus/utils-common@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/utils-common/-/utils-common-2.4.0.tgz" - integrity sha512-zIMf10xuKxddYfLg5cS19x44zud/E9I7lj3+0bv8UIs0aahpErfNrGhijEfJpAfikhQ8tL3m35nH3hJ3sOG82A== +"@docusaurus/utils-common@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/utils-common/-/utils-common-3.6.0.tgz#11855ea503132bbcaba6ca4d351293ff10a75d34" + integrity sha512-diUDNfbw33GaZMmKwdTckT2IBfVouXLXRD+zphH9ywswuaEIKqixvuf5g41H7MBBrlMsxhna3uTMoB4B/OPDcA== dependencies: - tslib "^2.4.0" + tslib "^2.6.0" -"@docusaurus/utils-validation@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/utils-validation/-/utils-validation-2.4.0.tgz" - integrity sha512-IrBsBbbAp6y7mZdJx4S4pIA7dUyWSA0GNosPk6ZJ0fX3uYIEQgcQSGIgTeSC+8xPEx3c16o03en1jSDpgQgz/w== - dependencies: - "@docusaurus/logger" "2.4.0" - "@docusaurus/utils" "2.4.0" - joi "^17.6.0" +"@docusaurus/utils-validation@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/utils-validation/-/utils-validation-3.6.0.tgz#5557ca14fa64ac29e6f70e61006be721395ecde5" + integrity sha512-CRHiKKJEKA0GFlfOf71JWHl7PtwOyX0+Zg9ep9NFEZv6Lcx3RJ9nhl7p8HRjPL6deyYceavM//BsfW4pCI4BtA== + dependencies: + "@docusaurus/logger" "3.6.0" + "@docusaurus/utils" "3.6.0" + "@docusaurus/utils-common" "3.6.0" + fs-extra "^11.2.0" + joi "^17.9.2" js-yaml "^4.1.0" - tslib "^2.4.0" + lodash "^4.17.21" + tslib "^2.6.0" -"@docusaurus/utils@2.4.0": - version "2.4.0" - resolved "https://registry.npmjs.org/@docusaurus/utils/-/utils-2.4.0.tgz" - integrity sha512-89hLYkvtRX92j+C+ERYTuSUK6nF9bGM32QThcHPg2EDDHVw6FzYQXmX6/p+pU5SDyyx5nBlE4qXR92RxCAOqfg== +"@docusaurus/utils@3.6.0": + version "3.6.0" + resolved "https://registry.yarnpkg.com/@docusaurus/utils/-/utils-3.6.0.tgz#192785da6fd62dfd83d6f1879c3aa45547f5df23" + integrity sha512-VKczAutI4mptiAw/WcYEu5WeVhQ6Q1zdIUl64SGw9K++9lziH+Kt10Ee8l2dMpRkiUk6zzK20kMNlX2WCUwXYQ== dependencies: - "@docusaurus/logger" "2.4.0" - "@svgr/webpack" "^6.2.1" + "@docusaurus/logger" "3.6.0" + "@docusaurus/utils-common" "3.6.0" + "@svgr/webpack" "^8.1.0" escape-string-regexp "^4.0.0" file-loader "^6.2.0" - fs-extra "^10.1.0" - github-slugger "^1.4.0" + fs-extra "^11.1.1" + github-slugger "^1.5.0" globby "^11.1.0" gray-matter "^4.0.3" + jiti "^1.20.0" js-yaml "^4.1.0" lodash "^4.17.21" micromatch "^4.0.5" + prompts "^2.4.2" resolve-pathname "^3.0.0" shelljs "^0.8.5" - tslib "^2.4.0" + tslib "^2.6.0" url-loader "^4.1.1" - webpack "^5.73.0" + utility-types "^3.10.0" + webpack "^5.88.1" -"@emotion/babel-plugin@^11.11.0": - version "11.11.0" - resolved "https://registry.npmjs.org/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz" - integrity 
sha512-m4HEDZleaaCH+XgDDsPF15Ht6wTLsgDTeR3WYj9Q/k76JtWhrJjcP4+/XlG8LGT/Rol9qUfOIztXeA84ATpqPQ== +"@emotion/babel-plugin@^11.12.0": + version "11.12.0" + resolved "https://registry.yarnpkg.com/@emotion/babel-plugin/-/babel-plugin-11.12.0.tgz#7b43debb250c313101b3f885eba634f1d723fcc2" + integrity sha512-y2WQb+oP8Jqvvclh8Q55gLUyb7UFvgv7eJfsj7td5TToBrIUtPay2kMrZi4xjq9qw2vD0ZR5fSho0yqoFgX7Rw== dependencies: "@babel/helper-module-imports" "^7.16.7" "@babel/runtime" "^7.18.3" - "@emotion/hash" "^0.9.1" - "@emotion/memoize" "^0.8.1" - "@emotion/serialize" "^1.1.2" + "@emotion/hash" "^0.9.2" + "@emotion/memoize" "^0.9.0" + "@emotion/serialize" "^1.2.0" babel-plugin-macros "^3.1.0" convert-source-map "^1.5.0" escape-string-regexp "^4.0.0" @@ -1717,95 +1819,95 @@ source-map "^0.5.7" stylis "4.2.0" -"@emotion/cache@^11.11.0": - version "11.11.0" - resolved "https://registry.npmjs.org/@emotion/cache/-/cache-11.11.0.tgz" - integrity sha512-P34z9ssTCBi3e9EI1ZsWpNHcfY1r09ZO0rZbRO2ob3ZQMnFI35jB536qoXbkdesr5EUhYi22anuEJuyxifaqAQ== +"@emotion/cache@^11.11.0", "@emotion/cache@^11.13.0": + version "11.13.1" + resolved "https://registry.yarnpkg.com/@emotion/cache/-/cache-11.13.1.tgz#fecfc54d51810beebf05bf2a161271a1a91895d7" + integrity sha512-iqouYkuEblRcXmylXIwwOodiEK5Ifl7JcX7o6V4jI3iW4mLXX3dmt5xwBtIkJiQEXFAI+pC8X0i67yiPkH9Ucw== dependencies: - "@emotion/memoize" "^0.8.1" - "@emotion/sheet" "^1.2.2" - "@emotion/utils" "^1.2.1" - "@emotion/weak-memoize" "^0.3.1" + "@emotion/memoize" "^0.9.0" + "@emotion/sheet" "^1.4.0" + "@emotion/utils" "^1.4.0" + "@emotion/weak-memoize" "^0.4.0" stylis "4.2.0" -"@emotion/hash@^0.9.1": - version "0.9.1" - resolved "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.1.tgz" - integrity sha512-gJB6HLm5rYwSLI6PQa+X1t5CFGrv1J1TWG+sOyMCeKz2ojaj6Fnl/rZEspogG+cvqbt4AE/2eIyD2QfLKTBNlQ== +"@emotion/hash@^0.9.2": + version "0.9.2" + resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.9.2.tgz#ff9221b9f58b4dfe61e619a7788734bd63f6898b" + integrity sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g== -"@emotion/is-prop-valid@^1.2.1": - version "1.2.1" - resolved "https://registry.npmjs.org/@emotion/is-prop-valid/-/is-prop-valid-1.2.1.tgz" - integrity sha512-61Mf7Ufx4aDxx1xlDeOm8aFFigGHE4z+0sKCa+IHCeZKiyP9RLD0Mmx7m8b9/Cf37f7NAvQOOJAbQQGVr5uERw== +"@emotion/is-prop-valid@^1.3.0": + version "1.3.1" + resolved "https://registry.yarnpkg.com/@emotion/is-prop-valid/-/is-prop-valid-1.3.1.tgz#8d5cf1132f836d7adbe42cf0b49df7816fc88240" + integrity sha512-/ACwoqx7XQi9knQs/G0qKvv5teDMhD7bXYns9N/wM8ah8iNb8jZ2uNO0YOgiq2o2poIvVtJS2YALasQuMSQ7Kw== dependencies: - "@emotion/memoize" "^0.8.1" + "@emotion/memoize" "^0.9.0" -"@emotion/memoize@^0.8.1": - version "0.8.1" - resolved "https://registry.npmjs.org/@emotion/memoize/-/memoize-0.8.1.tgz" - integrity sha512-W2P2c/VRW1/1tLox0mVUalvnWXxavmv/Oum2aPsRcoDJuob75FC3Y8FbpfLwUegRcxINtGUMPq0tFCvYNTBXNA== +"@emotion/memoize@^0.9.0": + version "0.9.0" + resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.9.0.tgz#745969d649977776b43fc7648c556aaa462b4102" + integrity sha512-30FAj7/EoJ5mwVPOWhAyCX+FPfMDrVecJAM+Iw9NRoSl4BBAQeqj4cApHHUXOVvIPgLVDsCFoz/hGD+5QQD1GQ== -"@emotion/react@^11.11.1": - version "11.11.1" - resolved "https://registry.npmjs.org/@emotion/react/-/react-11.11.1.tgz" - integrity sha512-5mlW1DquU5HaxjLkfkGN1GA/fvVGdyHURRiX/0FHl2cfIfRxSOfmxEH5YS43edp0OldZrZ+dkBKbngxcNCdZvA== +"@emotion/react@^11.13.3": + version "11.13.3" + resolved 
"https://registry.yarnpkg.com/@emotion/react/-/react-11.13.3.tgz#a69d0de2a23f5b48e0acf210416638010e4bd2e4" + integrity sha512-lIsdU6JNrmYfJ5EbUCf4xW1ovy5wKQ2CkPRM4xogziOxH1nXxBSjpC9YqbFAP7circxMfYp+6x676BqWcEiixg== dependencies: "@babel/runtime" "^7.18.3" - "@emotion/babel-plugin" "^11.11.0" - "@emotion/cache" "^11.11.0" - "@emotion/serialize" "^1.1.2" - "@emotion/use-insertion-effect-with-fallbacks" "^1.0.1" - "@emotion/utils" "^1.2.1" - "@emotion/weak-memoize" "^0.3.1" + "@emotion/babel-plugin" "^11.12.0" + "@emotion/cache" "^11.13.0" + "@emotion/serialize" "^1.3.1" + "@emotion/use-insertion-effect-with-fallbacks" "^1.1.0" + "@emotion/utils" "^1.4.0" + "@emotion/weak-memoize" "^0.4.0" hoist-non-react-statics "^3.3.1" -"@emotion/serialize@^1.1.2": - version "1.1.2" - resolved "https://registry.npmjs.org/@emotion/serialize/-/serialize-1.1.2.tgz" - integrity sha512-zR6a/fkFP4EAcCMQtLOhIgpprZOwNmCldtpaISpvz348+DP4Mz8ZoKaGGCQpbzepNIUWbq4w6hNZkwDyKoS+HA== +"@emotion/serialize@^1.2.0", "@emotion/serialize@^1.3.0", "@emotion/serialize@^1.3.1": + version "1.3.2" + resolved "https://registry.yarnpkg.com/@emotion/serialize/-/serialize-1.3.2.tgz#e1c1a2e90708d5d85d81ccaee2dfeb3cc0cccf7a" + integrity sha512-grVnMvVPK9yUVE6rkKfAJlYZgo0cu3l9iMC77V7DW6E1DUIrU68pSEXRmFZFOFB1QFo57TncmOcvcbMDWsL4yA== dependencies: - "@emotion/hash" "^0.9.1" - "@emotion/memoize" "^0.8.1" - "@emotion/unitless" "^0.8.1" - "@emotion/utils" "^1.2.1" + "@emotion/hash" "^0.9.2" + "@emotion/memoize" "^0.9.0" + "@emotion/unitless" "^0.10.0" + "@emotion/utils" "^1.4.1" csstype "^3.0.2" -"@emotion/sheet@^1.2.2": - version "1.2.2" - resolved "https://registry.npmjs.org/@emotion/sheet/-/sheet-1.2.2.tgz" - integrity sha512-0QBtGvaqtWi+nx6doRwDdBIzhNdZrXUppvTM4dtZZWEGTXL/XE/yJxLMGlDT1Gt+UHH5IX1n+jkXyytE/av7OA== +"@emotion/sheet@^1.4.0": + version "1.4.0" + resolved "https://registry.yarnpkg.com/@emotion/sheet/-/sheet-1.4.0.tgz#c9299c34d248bc26e82563735f78953d2efca83c" + integrity sha512-fTBW9/8r2w3dXWYM4HCB1Rdp8NLibOw2+XELH5m5+AkWiL/KqYX6dc0kKYlaYyKjrQ6ds33MCdMPEwgs2z1rqg== -"@emotion/styled@^11.11.0": - version "11.11.0" - resolved "https://registry.npmjs.org/@emotion/styled/-/styled-11.11.0.tgz" - integrity sha512-hM5Nnvu9P3midq5aaXj4I+lnSfNi7Pmd4EWk1fOZ3pxookaQTNew6bp4JaCBYM4HVFZF9g7UjJmsUmC2JlxOng== +"@emotion/styled@^11.13.0": + version "11.13.0" + resolved "https://registry.yarnpkg.com/@emotion/styled/-/styled-11.13.0.tgz#633fd700db701472c7a5dbef54d6f9834e9fb190" + integrity sha512-tkzkY7nQhW/zC4hztlwucpT8QEZ6eUzpXDRhww/Eej4tFfO0FxQYWRyg/c5CCXa4d/f174kqeXYjuQRnhzf6dA== dependencies: "@babel/runtime" "^7.18.3" - "@emotion/babel-plugin" "^11.11.0" - "@emotion/is-prop-valid" "^1.2.1" - "@emotion/serialize" "^1.1.2" - "@emotion/use-insertion-effect-with-fallbacks" "^1.0.1" - "@emotion/utils" "^1.2.1" - -"@emotion/unitless@^0.8.1": - version "0.8.1" - resolved "https://registry.npmjs.org/@emotion/unitless/-/unitless-0.8.1.tgz" - integrity sha512-KOEGMu6dmJZtpadb476IsZBclKvILjopjUii3V+7MnXIQCYh8W3NgNcgwo21n9LXZX6EDIKvqfjYxXebDwxKmQ== - -"@emotion/use-insertion-effect-with-fallbacks@^1.0.1": - version "1.0.1" - resolved "https://registry.npmjs.org/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.0.1.tgz" - integrity sha512-jT/qyKZ9rzLErtrjGgdkMBn2OP8wl0G3sQlBb3YPryvKHsjvINUhVaPFfP+fpBcOkmrVOVEEHQFJ7nbj2TH2gw== + "@emotion/babel-plugin" "^11.12.0" + "@emotion/is-prop-valid" "^1.3.0" + "@emotion/serialize" "^1.3.0" + "@emotion/use-insertion-effect-with-fallbacks" "^1.1.0" + "@emotion/utils" "^1.4.0" + 
+"@emotion/unitless@^0.10.0": + version "0.10.0" + resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.10.0.tgz#2af2f7c7e5150f497bdabd848ce7b218a27cf745" + integrity sha512-dFoMUuQA20zvtVTuxZww6OHoJYgrzfKM1t52mVySDJnMSEa08ruEvdYQbhvyu6soU+NeLVd3yKfTfT0NeV6qGg== + +"@emotion/use-insertion-effect-with-fallbacks@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@emotion/use-insertion-effect-with-fallbacks/-/use-insertion-effect-with-fallbacks-1.1.0.tgz#1a818a0b2c481efba0cf34e5ab1e0cb2dcb9dfaf" + integrity sha512-+wBOcIV5snwGgI2ya3u99D7/FJquOIniQT1IKyDsBmEgwvpxMNeS65Oib7OnE2d2aY+3BU4OiH+0Wchf8yk3Hw== -"@emotion/utils@^1.2.1": - version "1.2.1" - resolved "https://registry.npmjs.org/@emotion/utils/-/utils-1.2.1.tgz" - integrity sha512-Y2tGf3I+XVnajdItskUCn6LX+VUDmP6lTL4fcqsXAv43dnlbZiuW4MWQW38rW/BVWSE7Q/7+XQocmpnRYILUmg== +"@emotion/utils@^1.4.0", "@emotion/utils@^1.4.1": + version "1.4.1" + resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-1.4.1.tgz#b3adbb43de12ee2149541c4f1337d2eb7774f0ad" + integrity sha512-BymCXzCG3r72VKJxaYVwOXATqXIZ85cuvg0YOUDxMGNrKc1DJRZk8MgV5wyXRyEayIMd4FuXJIUgTBXvDNW5cA== -"@emotion/weak-memoize@^0.3.1": - version "0.3.1" - resolved "https://registry.npmjs.org/@emotion/weak-memoize/-/weak-memoize-0.3.1.tgz" - integrity sha512-EsBwpc7hBUJWAsNPBmJy4hxWx12v6bshQsldrVmjxJoc3isbxhOrF2IcCpaXxfvq03NwkI7sbsOLXbYuqF/8Ww== +"@emotion/weak-memoize@^0.4.0": + version "0.4.0" + resolved "https://registry.yarnpkg.com/@emotion/weak-memoize/-/weak-memoize-0.4.0.tgz#5e13fac887f08c44f76b0ccaf3370eb00fec9bb6" + integrity sha512-snKqtPW01tN0ui7yu9rGv69aJXr/a/Ywvl11sUjNtEcRc+ng/mQriFL0wLXMef74iHa/EkftbDzU9F8iFbH+zg== "@floating-ui/core@^1.4.2": version "1.5.1" @@ -1835,22 +1937,41 @@ integrity sha512-OfX7E2oUDYxtBvsuS4e/jSn4Q9Qb6DzgeYtsAdkPZ47znpoNsMgZw0+tVijiv3uGNR6dgNlty6r9rzIzHjtd/A== "@fontsource/roboto@^5.0.8": - version "5.0.8" - resolved "https://registry.npmjs.org/@fontsource/roboto/-/roboto-5.0.8.tgz" - integrity sha512-XxPltXs5R31D6UZeLIV1td3wTXU3jzd3f2DLsXI8tytMGBkIsGcc9sIyiupRtA8y73HAhuSCeweOoBqf6DbWCA== + version "5.1.0" + resolved "https://registry.yarnpkg.com/@fontsource/roboto/-/roboto-5.1.0.tgz#00230737ec09c60ae877a5e33d067c0607fdd5ba" + integrity sha512-cFRRC1s6RqPygeZ8Uw/acwVHqih8Czjt6Q0MwoUoDe9U3m4dH1HmNDRBZyqlMSFwgNAUKgFImncKdmDHyKpwdg== -"@hapi/hoek@^9.0.0": +"@hapi/hoek@^9.0.0", "@hapi/hoek@^9.3.0": version "9.3.0" resolved "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.3.0.tgz" integrity sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ== -"@hapi/topo@^5.0.0": +"@hapi/topo@^5.1.0": version "5.1.0" - resolved "https://registry.npmjs.org/@hapi/topo/-/topo-5.1.0.tgz" + resolved "https://registry.yarnpkg.com/@hapi/topo/-/topo-5.1.0.tgz#dc448e332c6c6e37a4dc02fd84ba8d44b9afb012" integrity sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg== dependencies: "@hapi/hoek" "^9.0.0" +"@jest/schemas@^29.6.3": + version "29.6.3" + resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.6.3.tgz#430b5ce8a4e0044a7e3819663305a7b3091c8e03" + integrity sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA== + dependencies: + "@sinclair/typebox" "^0.27.8" + +"@jest/types@^29.6.3": + version "29.6.3" + resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.6.3.tgz#1131f8cf634e7e84c5e77bab12f052af585fba59" + integrity 
sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw== + dependencies: + "@jest/schemas" "^29.6.3" + "@types/istanbul-lib-coverage" "^2.0.0" + "@types/istanbul-reports" "^3.0.0" + "@types/node" "*" + "@types/yargs" "^17.0.8" + chalk "^4.0.0" + "@jridgewell/gen-mapping@^0.1.0": version "0.1.1" resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz" @@ -1859,7 +1980,7 @@ "@jridgewell/set-array" "^1.0.0" "@jridgewell/sourcemap-codec" "^1.4.10" -"@jridgewell/gen-mapping@^0.3.0", "@jridgewell/gen-mapping@^0.3.2": +"@jridgewell/gen-mapping@^0.3.0": version "0.3.2" resolved "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.2.tgz" integrity sha512-mh65xKQAzI6iBcFzwv28KVWSmCkdRBWoOh+bYQGW3+6OZvbbN3TqMGo5hqYxQniRcH9F2VZIoJCm4pa3BPDK/A== @@ -1923,15 +2044,7 @@ resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a" integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ== -"@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.7", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.18" - resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz" - integrity sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA== - dependencies: - "@jridgewell/resolve-uri" "3.1.0" - "@jridgewell/sourcemap-codec" "1.4.14" - -"@jridgewell/trace-mapping@^0.3.20", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": +"@jridgewell/trace-mapping@^0.3.18", "@jridgewell/trace-mapping@^0.3.20", "@jridgewell/trace-mapping@^0.3.24", "@jridgewell/trace-mapping@^0.3.25": version "0.3.25" resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz#15f190e98895f3fc23276ee14bc76b675c2e50f0" integrity sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ== @@ -1939,68 +2052,55 @@ "@jridgewell/resolve-uri" "^3.1.0" "@jridgewell/sourcemap-codec" "^1.4.14" +"@jridgewell/trace-mapping@^0.3.9": + version "0.3.18" + resolved "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.18.tgz" + integrity sha512-w+niJYzMHdd7USdiH2U6869nqhD2nbfZXND5Yp93qIbEmnDNk7PD48o+YchRVpzMU7M6jVCbenTR7PA1FLQ9pA== + dependencies: + "@jridgewell/resolve-uri" "3.1.0" + "@jridgewell/sourcemap-codec" "1.4.14" + "@leichtgewicht/ip-codec@^2.0.1": version "2.0.4" resolved "https://registry.npmjs.org/@leichtgewicht/ip-codec/-/ip-codec-2.0.4.tgz" integrity sha512-Hcv+nVC0kZnQ3tD9GVu5xSMR4VVYOteQIr/hwFPVEvPdlXqgGEuRjiheChHgdM+JyqdgNcmzZOX/tnl0JOiI7A== -"@mdx-js/mdx@^1.6.22": - version "1.6.22" - resolved "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-1.6.22.tgz" - integrity sha512-AMxuLxPz2j5/6TpF/XSdKpQP1NlG0z11dFOlq+2IP/lSgl11GY8ji6S/rgsViN/L0BDvHvUMruRb7ub+24LUYA== - dependencies: - "@babel/core" "7.12.9" - "@babel/plugin-syntax-jsx" "7.12.1" - "@babel/plugin-syntax-object-rest-spread" "7.8.3" - "@mdx-js/util" "1.6.22" - babel-plugin-apply-mdx-type-prop "1.6.22" - babel-plugin-extract-import-names "1.6.22" - camelcase-css "2.0.1" - detab "2.0.4" - hast-util-raw "6.0.1" - lodash.uniq "4.5.0" - mdast-util-to-hast "10.0.1" - remark-footnotes "2.0.0" - remark-mdx "1.6.22" - remark-parse "8.0.3" - remark-squeeze-paragraphs "4.0.0" - style-to-object "0.3.0" - unified "9.2.0" - unist-builder "2.0.3" - unist-util-visit "2.0.3" - -"@mdx-js/mdx@^2.3.0": - version "2.3.0" 
- resolved "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-2.3.0.tgz" - integrity sha512-jLuwRlz8DQfQNiUCJR50Y09CGPq3fLtmtUQfVrj79E0JWu3dvsVcxVIcfhR5h0iXu+/z++zDrYeiJqifRynJkA== +"@mdx-js/mdx@^3.0.0", "@mdx-js/mdx@^3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@mdx-js/mdx/-/mdx-3.1.0.tgz#10235cab8ad7d356c262e8c21c68df5850a97dc3" + integrity sha512-/QxEhPAvGwbQmy1Px8F899L5Uc2KZ6JtXwlCgJmjSTBedwOZkByYcBG4GceIGPXRDsmfxhHazuS+hlOShRLeDw== dependencies: + "@types/estree" "^1.0.0" "@types/estree-jsx" "^1.0.0" + "@types/hast" "^3.0.0" "@types/mdx" "^2.0.0" - estree-util-build-jsx "^2.0.0" - estree-util-is-identifier-name "^2.0.0" - estree-util-to-js "^1.1.0" + collapse-white-space "^2.0.0" + devlop "^1.0.0" + estree-util-is-identifier-name "^3.0.0" + estree-util-scope "^1.0.0" estree-walker "^3.0.0" - hast-util-to-estree "^2.0.0" - markdown-extensions "^1.0.0" - periscopic "^3.0.0" - remark-mdx "^2.0.0" - remark-parse "^10.0.0" - remark-rehype "^10.0.0" - unified "^10.0.0" - unist-util-position-from-estree "^1.0.0" - unist-util-stringify-position "^3.0.0" - unist-util-visit "^4.0.0" - vfile "^5.0.0" - -"@mdx-js/react@^1.6.22": - version "1.6.22" - resolved "https://registry.npmjs.org/@mdx-js/react/-/react-1.6.22.tgz" - integrity sha512-TDoPum4SHdfPiGSAaRBw7ECyI8VaHpK8GJugbJIJuqyh6kzw9ZLJZW3HGL3NNrJGxcAixUvqROm+YuQOo5eXtg== - -"@mdx-js/util@1.6.22": - version "1.6.22" - resolved "https://registry.npmjs.org/@mdx-js/util/-/util-1.6.22.tgz" - integrity sha512-H1rQc1ZOHANWBvPcW+JpGwr+juXSxM8Q8YCkm3GhZd8REu1fHR3z99CErO1p9pkcfcxZnMdIZdIsXkOHY0NilA== + hast-util-to-jsx-runtime "^2.0.0" + markdown-extensions "^2.0.0" + recma-build-jsx "^1.0.0" + recma-jsx "^1.0.0" + recma-stringify "^1.0.0" + rehype-recma "^1.0.0" + remark-mdx "^3.0.0" + remark-parse "^11.0.0" + remark-rehype "^11.0.0" + source-map "^0.7.0" + unified "^11.0.0" + unist-util-position-from-estree "^2.0.0" + unist-util-stringify-position "^4.0.0" + unist-util-visit "^5.0.0" + vfile "^6.0.0" + +"@mdx-js/react@^3.0.0", "@mdx-js/react@^3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@mdx-js/react/-/react-3.1.0.tgz#c4522e335b3897b9a845db1dbdd2f966ae8fb0ed" + integrity sha512-QjHtSaoameoalGnKDT3FoIl4+9RwyTmo9ZJGBdLOks/YOiWHoRDI3PUwEzOE7kEmGcV3AFcp9K6dYu9rEuKLAQ== + dependencies: + "@types/mdx" "^2.0.0" "@mui/base@5.0.0-beta.25": version "5.0.0-beta.25" @@ -2142,162 +2242,162 @@ "@pnpm/network.ca-file" "^1.0.1" config-chain "^1.1.11" -"@polka/url@^1.0.0-next.20": - version "1.0.0-next.21" - resolved "https://registry.npmjs.org/@polka/url/-/url-1.0.0-next.21.tgz" - integrity sha512-a5Sab1C4/icpTZVzZc5Ghpz88yQtGOyNqYXcZgOssB2uuAr+wF/MvN6bgtW32q7HHrvBki+BsZ0OuNv6EV3K9g== +"@polka/url@^1.0.0-next.24": + version "1.0.0-next.28" + resolved "https://registry.yarnpkg.com/@polka/url/-/url-1.0.0-next.28.tgz#d45e01c4a56f143ee69c54dd6b12eade9e270a73" + integrity sha512-8LduaNlMZGwdZ6qWrKlfa+2M4gahzFkprZiAt2TF8uS0qQgBizKXpXURqvTJ4WtmupWxaLqjRb2UCTe72mu+Aw== "@popperjs/core@^2.11.8": version "2.11.8" resolved "https://registry.npmjs.org/@popperjs/core/-/core-2.11.8.tgz" integrity sha512-P1st0aksCrn9sGZhp8GMYwBnQsbvAWsZAX44oXNNvLHGqAOcoVxmjZiohstwQ7SqKnbR47akdNi+uleWD8+g6A== -"@sideway/address@^4.1.3": - version "4.1.4" - resolved "https://registry.npmjs.org/@sideway/address/-/address-4.1.4.tgz" - integrity sha512-7vwq+rOHVWjyXxVlR76Agnvhy8I9rpzjosTESvmhNeXOXdZZB15Fl+TI9x1SiHZH5Jv2wTGduSxFDIaq0m3DUw== +"@sideway/address@^4.1.5": + version "4.1.5" + resolved 
"https://registry.yarnpkg.com/@sideway/address/-/address-4.1.5.tgz#4bc149a0076623ced99ca8208ba780d65a99b9d5" + integrity sha512-IqO/DUQHUkPeixNQ8n0JA6102hT9CmaljNTPmQ1u8MEhBo/R4Q8eKLN/vGZxuebwOroDB4cbpjheD4+/sKFK4Q== dependencies: "@hapi/hoek" "^9.0.0" -"@sideway/formula@^3.0.0": - version "3.0.0" - resolved "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.0.tgz" - integrity sha512-vHe7wZ4NOXVfkoRb8T5otiENVlT7a3IAiw7H5M2+GO+9CDgcVUUsX1zalAztCmwyOr2RUTGJdgB+ZvSVqmdHmg== +"@sideway/formula@^3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@sideway/formula/-/formula-3.0.1.tgz#80fcbcbaf7ce031e0ef2dd29b1bfc7c3f583611f" + integrity sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg== "@sideway/pinpoint@^2.0.0": version "2.0.0" resolved "https://registry.npmjs.org/@sideway/pinpoint/-/pinpoint-2.0.0.tgz" integrity sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ== -"@sindresorhus/is@^0.14.0": - version "0.14.0" - resolved "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz" - integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== +"@sinclair/typebox@^0.27.8": + version "0.27.8" + resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e" + integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== + +"@sindresorhus/is@^4.6.0": + version "4.6.0" + resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.6.0.tgz#3c7c9c46e678feefe7a2e5bb609d3dbd665ffb3f" + integrity sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw== "@sindresorhus/is@^5.2.0": version "5.6.0" resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-5.6.0.tgz#41dd6093d34652cddb5d5bdeee04eafc33826668" integrity sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g== -"@slorber/static-site-generator-webpack-plugin@^4.0.7": - version "4.0.7" - resolved "https://registry.npmjs.org/@slorber/static-site-generator-webpack-plugin/-/static-site-generator-webpack-plugin-4.0.7.tgz" - integrity sha512-Ug7x6z5lwrz0WqdnNFOMYrDQNTPAprvHLSh6+/fmml3qUiz6l5eq+2MzLKWtn/q5K5NpSiFsZTP/fck/3vjSxA== +"@slorber/remark-comment@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@slorber/remark-comment/-/remark-comment-1.0.0.tgz#2a020b3f4579c89dec0361673206c28d67e08f5a" + integrity sha512-RCE24n7jsOj1M0UPvIQCHTe7fI0sFL4S2nwKVWwHyVr/wI/H8GosgsJGyhnsZoGFnD/P2hLf1mSbrrgSLN93NA== dependencies: - eval "^0.1.8" - p-map "^4.0.0" - webpack-sources "^3.2.2" + micromark-factory-space "^1.0.0" + micromark-util-character "^1.1.0" + micromark-util-symbol "^1.0.1" -"@svgr/babel-plugin-add-jsx-attribute@^6.0.0": - version "6.0.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-6.0.0.tgz" - integrity sha512-MdPdhdWLtQsjd29Wa4pABdhWbaRMACdM1h31BY+c6FghTZqNGT7pEYdBoaGeKtdTOBC/XNFQaKVj+r/Ei2ryWA== +"@svgr/babel-plugin-add-jsx-attribute@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-8.0.0.tgz#4001f5d5dd87fa13303e36ee106e3ff3a7eb8b22" + integrity sha512-b9MIk7yhdS1pMCZM8VeNfUlSKVRhsHZNMl5O9SfaX0l0t5wjdgu4IDzGB8bpnGBBOjGST3rRFVsaaEtI4W6f7g== -"@svgr/babel-plugin-remove-jsx-attribute@^6.0.0": - version "6.0.0" - resolved 
"https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-6.0.0.tgz" - integrity sha512-aVdtfx9jlaaxc3unA6l+M9YRnKIZjOhQPthLKqmTXC8UVkBLDRGwPKo+r8n3VZN8B34+yVajzPTZ+ptTSuZZCw== +"@svgr/babel-plugin-remove-jsx-attribute@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-attribute/-/babel-plugin-remove-jsx-attribute-8.0.0.tgz#69177f7937233caca3a1afb051906698f2f59186" + integrity sha512-BcCkm/STipKvbCl6b7QFrMh/vx00vIP63k2eM66MfHJzPr6O2U0jYEViXkHJWqXqQYjdeA9cuCl5KWmlwjDvbA== -"@svgr/babel-plugin-remove-jsx-empty-expression@^6.0.0": - version "6.0.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-6.0.0.tgz" - integrity sha512-Ccj42ApsePD451AZJJf1QzTD1B/BOU392URJTeXFxSK709i0KUsGtbwyiqsKu7vsYxpTM0IA5clAKDyf9RCZyA== +"@svgr/babel-plugin-remove-jsx-empty-expression@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-remove-jsx-empty-expression/-/babel-plugin-remove-jsx-empty-expression-8.0.0.tgz#c2c48104cfd7dcd557f373b70a56e9e3bdae1d44" + integrity sha512-5BcGCBfBxB5+XSDSWnhTThfI9jcO5f0Ai2V24gZpG+wXF14BzwxxdDb4g6trdOux0rhibGs385BeFMSmxtS3uA== -"@svgr/babel-plugin-replace-jsx-attribute-value@^6.0.0": - version "6.0.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-6.0.0.tgz" - integrity sha512-88V26WGyt1Sfd1emBYmBJRWMmgarrExpKNVmI9vVozha4kqs6FzQJ/Kp5+EYli1apgX44518/0+t9+NU36lThQ== +"@svgr/babel-plugin-replace-jsx-attribute-value@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-replace-jsx-attribute-value/-/babel-plugin-replace-jsx-attribute-value-8.0.0.tgz#8fbb6b2e91fa26ac5d4aa25c6b6e4f20f9c0ae27" + integrity sha512-KVQ+PtIjb1BuYT3ht8M5KbzWBhdAjjUPdlMtpuw/VjT8coTrItWX6Qafl9+ji831JaJcu6PJNKCV0bp01lBNzQ== -"@svgr/babel-plugin-svg-dynamic-title@^6.0.0": - version "6.0.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-6.0.0.tgz" - integrity sha512-F7YXNLfGze+xv0KMQxrl2vkNbI9kzT9oDK55/kUuymh1ACyXkMV+VZWX1zEhSTfEKh7VkHVZGmVtHg8eTZ6PRg== +"@svgr/babel-plugin-svg-dynamic-title@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-dynamic-title/-/babel-plugin-svg-dynamic-title-8.0.0.tgz#1d5ba1d281363fc0f2f29a60d6d936f9bbc657b0" + integrity sha512-omNiKqwjNmOQJ2v6ge4SErBbkooV2aAWwaPFs2vUY7p7GhVkzRkJ00kILXQvRhA6miHnNpXv7MRnnSjdRjK8og== -"@svgr/babel-plugin-svg-em-dimensions@^6.0.0": - version "6.0.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-6.0.0.tgz" - integrity sha512-+rghFXxdIqJNLQK08kwPBD3Z22/0b2tEZ9lKiL/yTfuyj1wW8HUXu4bo/XkogATIYuXSghVQOOCwURXzHGKyZA== +"@svgr/babel-plugin-svg-em-dimensions@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-svg-em-dimensions/-/babel-plugin-svg-em-dimensions-8.0.0.tgz#35e08df300ea8b1d41cb8f62309c241b0369e501" + integrity sha512-mURHYnu6Iw3UBTbhGwE/vsngtCIbHE43xCRK7kCw4t01xyGqb2Pd+WXekRRoFOBIY29ZoOhUCTEweDMdrjfi9g== -"@svgr/babel-plugin-transform-react-native-svg@^6.0.0": - version "6.0.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-6.0.0.tgz" - integrity sha512-VaphyHZ+xIKv5v0K0HCzyfAaLhPGJXSk2HkpYfXIOKb7DjLBv0soHDxNv6X0vr2titsxE7klb++u7iOf7TSrFQ== 
+"@svgr/babel-plugin-transform-react-native-svg@8.1.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-react-native-svg/-/babel-plugin-transform-react-native-svg-8.1.0.tgz#90a8b63998b688b284f255c6a5248abd5b28d754" + integrity sha512-Tx8T58CHo+7nwJ+EhUwx3LfdNSG9R2OKfaIXXs5soiy5HtgoAEkDay9LIimLOcG8dJQH1wPZp/cnAv6S9CrR1Q== -"@svgr/babel-plugin-transform-svg-component@^6.2.0": - version "6.2.0" - resolved "https://registry.npmjs.org/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-6.2.0.tgz" - integrity sha512-bhYIpsORb++wpsp91fymbFkf09Z/YEKR0DnFjxvN+8JHeCUD2unnh18jIMKnDJTWtvpTaGYPXELVe4OOzFI0xg== +"@svgr/babel-plugin-transform-svg-component@8.0.0": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-transform-svg-component/-/babel-plugin-transform-svg-component-8.0.0.tgz#013b4bfca88779711f0ed2739f3f7efcefcf4f7e" + integrity sha512-DFx8xa3cZXTdb/k3kfPeaixecQLgKh5NVBMwD0AQxOzcZawK4oo1Jh9LbrcACUivsCA7TLG8eeWgrDXjTMhRmw== -"@svgr/babel-preset@^6.2.0": - version "6.2.0" - resolved "https://registry.npmjs.org/@svgr/babel-preset/-/babel-preset-6.2.0.tgz" - integrity sha512-4WQNY0J71JIaL03DRn0vLiz87JXx0b9dYm2aA8XHlQJQoixMl4r/soYHm8dsaJZ3jWtkCiOYy48dp9izvXhDkQ== - dependencies: - "@svgr/babel-plugin-add-jsx-attribute" "^6.0.0" - "@svgr/babel-plugin-remove-jsx-attribute" "^6.0.0" - "@svgr/babel-plugin-remove-jsx-empty-expression" "^6.0.0" - "@svgr/babel-plugin-replace-jsx-attribute-value" "^6.0.0" - "@svgr/babel-plugin-svg-dynamic-title" "^6.0.0" - "@svgr/babel-plugin-svg-em-dimensions" "^6.0.0" - "@svgr/babel-plugin-transform-react-native-svg" "^6.0.0" - "@svgr/babel-plugin-transform-svg-component" "^6.2.0" - -"@svgr/core@^6.2.1": - version "6.2.1" - resolved "https://registry.npmjs.org/@svgr/core/-/core-6.2.1.tgz" - integrity sha512-NWufjGI2WUyrg46mKuySfviEJ6IxHUOm/8a3Ph38VCWSp+83HBraCQrpEM3F3dB6LBs5x8OElS8h3C0oOJaJAA== +"@svgr/babel-preset@8.1.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@svgr/babel-preset/-/babel-preset-8.1.0.tgz#0e87119aecdf1c424840b9d4565b7137cabf9ece" + integrity sha512-7EYDbHE7MxHpv4sxvnVPngw5fuR6pw79SkcrILHJ/iMpuKySNCl5W1qcwPEpU+LgyRXOaAFgH0KhwD18wwg6ug== + dependencies: + "@svgr/babel-plugin-add-jsx-attribute" "8.0.0" + "@svgr/babel-plugin-remove-jsx-attribute" "8.0.0" + "@svgr/babel-plugin-remove-jsx-empty-expression" "8.0.0" + "@svgr/babel-plugin-replace-jsx-attribute-value" "8.0.0" + "@svgr/babel-plugin-svg-dynamic-title" "8.0.0" + "@svgr/babel-plugin-svg-em-dimensions" "8.0.0" + "@svgr/babel-plugin-transform-react-native-svg" "8.1.0" + "@svgr/babel-plugin-transform-svg-component" "8.0.0" + +"@svgr/core@8.1.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@svgr/core/-/core-8.1.0.tgz#41146f9b40b1a10beaf5cc4f361a16a3c1885e88" + integrity sha512-8QqtOQT5ACVlmsvKOJNEaWmRPmcojMOzCz4Hs2BGG/toAp/K38LcsMRyLp349glq5AzJbCEeimEoxaX6v/fLrA== dependencies: - "@svgr/plugin-jsx" "^6.2.1" + "@babel/core" "^7.21.3" + "@svgr/babel-preset" "8.1.0" camelcase "^6.2.0" - cosmiconfig "^7.0.1" + cosmiconfig "^8.1.3" + snake-case "^3.0.4" -"@svgr/hast-util-to-babel-ast@^6.2.1": - version "6.2.1" - resolved "https://registry.npmjs.org/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-6.2.1.tgz" - integrity sha512-pt7MMkQFDlWJVy9ULJ1h+hZBDGFfSCwlBNW1HkLnVi7jUhyEXUaGYWi1x6bM2IXuAR9l265khBT4Av4lPmaNLQ== +"@svgr/hast-util-to-babel-ast@8.0.0": + version "8.0.0" + resolved 
"https://registry.yarnpkg.com/@svgr/hast-util-to-babel-ast/-/hast-util-to-babel-ast-8.0.0.tgz#6952fd9ce0f470e1aded293b792a2705faf4ffd4" + integrity sha512-EbDKwO9GpfWP4jN9sGdYwPBU0kdomaPIL2Eu4YwmgP+sJeXT+L7bMwJUBnhzfH8Q2qMBqZ4fJwpCyYsAN3mt2Q== dependencies: - "@babel/types" "^7.15.6" - entities "^3.0.1" + "@babel/types" "^7.21.3" + entities "^4.4.0" -"@svgr/plugin-jsx@^6.2.1": - version "6.2.1" - resolved "https://registry.npmjs.org/@svgr/plugin-jsx/-/plugin-jsx-6.2.1.tgz" - integrity sha512-u+MpjTsLaKo6r3pHeeSVsh9hmGRag2L7VzApWIaS8imNguqoUwDq/u6U/NDmYs/KAsrmtBjOEaAAPbwNGXXp1g== +"@svgr/plugin-jsx@8.1.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@svgr/plugin-jsx/-/plugin-jsx-8.1.0.tgz#96969f04a24b58b174ee4cd974c60475acbd6928" + integrity sha512-0xiIyBsLlr8quN+WyuxooNW9RJ0Dpr8uOnH/xrCVO8GLUcwHISwj1AG0k+LFzteTkAA0GbX0kj9q6Dk70PTiPA== dependencies: - "@babel/core" "^7.15.5" - "@svgr/babel-preset" "^6.2.0" - "@svgr/hast-util-to-babel-ast" "^6.2.1" - svg-parser "^2.0.2" + "@babel/core" "^7.21.3" + "@svgr/babel-preset" "8.1.0" + "@svgr/hast-util-to-babel-ast" "8.0.0" + svg-parser "^2.0.4" -"@svgr/plugin-svgo@^6.2.0": - version "6.2.0" - resolved "https://registry.npmjs.org/@svgr/plugin-svgo/-/plugin-svgo-6.2.0.tgz" - integrity sha512-oDdMQONKOJEbuKwuy4Np6VdV6qoaLLvoY86hjvQEgU82Vx1MSWRyYms6Sl0f+NtqxLI/rDVufATbP/ev996k3Q== +"@svgr/plugin-svgo@8.1.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@svgr/plugin-svgo/-/plugin-svgo-8.1.0.tgz#b115b7b967b564f89ac58feae89b88c3decd0f00" + integrity sha512-Ywtl837OGO9pTLIN/onoWLmDQ4zFUycI1g76vuKGEz6evR/ZTJlJuz3G/fIkb6OVBJ2g0o6CGJzaEjfmEo3AHA== dependencies: - cosmiconfig "^7.0.1" - deepmerge "^4.2.2" - svgo "^2.5.0" + cosmiconfig "^8.1.3" + deepmerge "^4.3.1" + svgo "^3.0.2" -"@svgr/webpack@^6.2.1": - version "6.2.1" - resolved "https://registry.npmjs.org/@svgr/webpack/-/webpack-6.2.1.tgz" - integrity sha512-h09ngMNd13hnePwgXa+Y5CgOjzlCvfWLHg+MBnydEedAnuLRzUHUJmGS3o2OsrhxTOOqEsPOFt5v/f6C5Qulcw== - dependencies: - "@babel/core" "^7.15.5" - "@babel/plugin-transform-react-constant-elements" "^7.14.5" - "@babel/preset-env" "^7.15.6" - "@babel/preset-react" "^7.14.5" - "@babel/preset-typescript" "^7.15.0" - "@svgr/core" "^6.2.1" - "@svgr/plugin-jsx" "^6.2.1" - "@svgr/plugin-svgo" "^6.2.0" - -"@szmarczak/http-timer@^1.1.2": - version "1.1.2" - resolved "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz" - integrity sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA== +"@svgr/webpack@^8.1.0": + version "8.1.0" + resolved "https://registry.yarnpkg.com/@svgr/webpack/-/webpack-8.1.0.tgz#16f1b5346f102f89fda6ec7338b96a701d8be0c2" + integrity sha512-LnhVjMWyMQV9ZmeEy26maJk+8HTIbd59cH4F2MJ439k9DqejRisfFNGAPvRYlKETuh9LrImlS8aKsBgKjMA8WA== dependencies: - defer-to-connect "^1.0.1" + "@babel/core" "^7.21.3" + "@babel/plugin-transform-react-constant-elements" "^7.21.3" + "@babel/preset-env" "^7.20.2" + "@babel/preset-react" "^7.18.6" + "@babel/preset-typescript" "^7.21.0" + "@svgr/core" "8.1.0" + "@svgr/plugin-jsx" "8.1.0" + "@svgr/plugin-svgo" "8.1.0" "@szmarczak/http-timer@^5.0.1": version "5.0.1" @@ -2311,10 +2411,10 @@ resolved "https://registry.npmjs.org/@trysound/sax/-/sax-0.2.0.tgz" integrity sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA== -"@tsconfig/docusaurus@^1.0.5": - version "1.0.6" - resolved "https://registry.npmjs.org/@tsconfig/docusaurus/-/docusaurus-1.0.6.tgz" - integrity 
sha512-1QxDaP54hpzM6bq9E+yFEo4F9WbWHhsDe4vktZXF/iDlc9FqGr9qlg+3X/nuKQXx8QxHV7ue8NXFazzajsxFBA== +"@tsconfig/docusaurus@^2.0.3": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@tsconfig/docusaurus/-/docusaurus-2.0.3.tgz#08a4a65e038a499fc4154ce32b538913d9f3c90d" + integrity sha512-3l1L5PzWVa7l0691TjnsZ0yOIEwG9DziSqu5IPZPlI5Dowi7z42cEym8Y35GHbgHvPcBfNxfrbxm7Cncn4nByQ== "@types/acorn@^4.0.0": version "4.0.6" @@ -2360,6 +2460,22 @@ dependencies: "@types/ms" "*" +"@types/eslint-scope@^3.7.7": + version "3.7.7" + resolved "https://registry.yarnpkg.com/@types/eslint-scope/-/eslint-scope-3.7.7.tgz#3108bd5f18b0cdb277c867b3dd449c9ed7079ac5" + integrity sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg== + dependencies: + "@types/eslint" "*" + "@types/estree" "*" + +"@types/eslint@*": + version "9.6.1" + resolved "https://registry.yarnpkg.com/@types/eslint/-/eslint-9.6.1.tgz#d5795ad732ce81715f27f75da913004a56751584" + integrity sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag== + dependencies: + "@types/estree" "*" + "@types/json-schema" "*" + "@types/estree-jsx@^1.0.0": version "1.0.0" resolved "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.0.tgz" @@ -2377,10 +2493,10 @@ resolved "https://registry.npmjs.org/@types/estree/-/estree-1.0.0.tgz" integrity sha512-WulqXMDUTYAXCjZnk6JtIHPigp55cVtDgDrO2gHRwhyJto21+1zbVCtOYB2L1F9w4qCQ0rOGWBnBe0FNTiEJIQ== -"@types/estree@^1.0.5": - version "1.0.5" - resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.5.tgz#a6ce3e556e00fd9895dd872dd172ad0d4bd687f4" - integrity sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw== +"@types/estree@^1.0.6": + version "1.0.6" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.6.tgz#628effeeae2064a1b4e79f78e81d87b7e5fc7b50" + integrity sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw== "@types/express-serve-static-core@*", "@types/express-serve-static-core@^4.17.18": version "4.17.29" @@ -2401,6 +2517,11 @@ "@types/qs" "*" "@types/serve-static" "*" +"@types/gtag.js@^0.0.12": + version "0.0.12" + resolved "https://registry.yarnpkg.com/@types/gtag.js/-/gtag.js-0.0.12.tgz#095122edca896689bdfcdd73b057e23064d23572" + integrity sha512-YQV9bUsemkzG81Ea295/nF/5GijnD2Af7QhEofh7xu+kvCN6RdodgNwwGWXB5GMI3NoyvQo0odNctoH/qLMIpg== + "@types/hast@^2.0.0": version "2.3.4" resolved "https://registry.npmjs.org/@types/hast/-/hast-2.3.4.tgz" @@ -2408,6 +2529,13 @@ dependencies: "@types/unist" "*" +"@types/hast@^3.0.0": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@types/hast/-/hast-3.0.4.tgz#1d6b39993b82cea6ad783945b0508c25903e15aa" + integrity sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ== + dependencies: + "@types/unist" "*" + "@types/history@^4.7.11": version "4.7.11" resolved "https://registry.npmjs.org/@types/history/-/history-4.7.11.tgz" @@ -2430,15 +2558,39 @@ dependencies: "@types/node" "*" +"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0": + version "2.0.6" + resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz#7739c232a1fee9b4d3ce8985f314c0c6d33549d7" + integrity sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w== + +"@types/istanbul-lib-report@*": + version "3.0.3" + resolved 
"https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz#53047614ae72e19fc0401d872de3ae2b4ce350bf" + integrity sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA== + dependencies: + "@types/istanbul-lib-coverage" "*" + +"@types/istanbul-reports@^3.0.0": + version "3.0.4" + resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz#0f03e3d2f670fbdac586e34b433783070cc16f54" + integrity sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ== + dependencies: + "@types/istanbul-lib-report" "*" + +"@types/json-schema@*": + version "7.0.15" + resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841" + integrity sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA== + "@types/json-schema@^7.0.4", "@types/json-schema@^7.0.5", "@types/json-schema@^7.0.8", "@types/json-schema@^7.0.9": version "7.0.11" resolved "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz" integrity sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ== -"@types/mdast@^3.0.0": - version "3.0.10" - resolved "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.10.tgz" - integrity sha512-W864tg/Osz1+9f4lrGTZpCSO5/z4608eUp19tbozkq2HJK6i3z1kT0H9tlADXuYIb1YYOBByU4Jsqkk75q48qA== +"@types/mdast@^4.0.0", "@types/mdast@^4.0.2": + version "4.0.4" + resolved "https://registry.yarnpkg.com/@types/mdast/-/mdast-4.0.4.tgz#7ccf72edd2f1aa7dd3437e180c64373585804dd6" + integrity sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA== dependencies: "@types/unist" "*" @@ -2457,6 +2609,13 @@ resolved "https://registry.npmjs.org/@types/ms/-/ms-0.7.31.tgz" integrity sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA== +"@types/node-forge@^1.3.0": + version "1.3.11" + resolved "https://registry.yarnpkg.com/@types/node-forge/-/node-forge-1.3.11.tgz#0972ea538ddb0f4d9c2fa0ec5db5724773a604da" + integrity sha512-FQx220y22OKNTqaByeBGqHWYz4cl94tpcxeFdvBo3wjG6XPBuZ0BNgNZRV5J5TFmmcsJ4IzsLkmGRiQbnYsBEQ== + dependencies: + "@types/node" "*" + "@types/node@*": version "18.0.3" resolved "https://registry.npmjs.org/@types/node/-/node-18.0.3.tgz" @@ -2468,16 +2627,21 @@ integrity sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw== "@types/parse-json@^4.0.0": - version "4.0.0" - resolved "https://registry.npmjs.org/@types/parse-json/-/parse-json-4.0.0.tgz" - integrity sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== + version "4.0.2" + resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.2.tgz#5950e50960793055845e956c427fc2b0d70c5239" + integrity sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw== "@types/parse5@^5.0.0": version "5.0.3" resolved "https://registry.npmjs.org/@types/parse5/-/parse5-5.0.3.tgz" integrity sha512-kUNnecmtkunAoQ3CnjmMkzNU/gtxG8guhi+Fk2U/kOpIKjIMKnXGp4IJCgQJrXSgMsWYimYG4TGjz/UzbGEBTw== -"@types/prop-types@*", "@types/prop-types@^15.0.0", "@types/prop-types@^15.7.11": +"@types/prismjs@^1.26.0": + version "1.26.5" + resolved "https://registry.yarnpkg.com/@types/prismjs/-/prismjs-1.26.5.tgz#72499abbb4c4ec9982446509d2f14fb8483869d6" + integrity 
sha512-AUZTa7hQ2KY5L7AmtSiqxlhWxb4ina0yd8hNbl4TWuqnv/pFP0nDMb3YrfSBf4hJVGLh2YEIBfKaBW/9UEl6IQ== + +"@types/prop-types@*", "@types/prop-types@^15.7.11": version "15.7.11" resolved "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.11.tgz" integrity sha512-ga8y9v9uyeiLdpKddhxYQkxNDrfvuPrlFb0N1qnZZByvcElJaXthF1UhvCh9TLWJBEHeNtdnbysW7Y6Uq8CVng== @@ -2492,7 +2656,7 @@ resolved "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz" integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw== -"@types/react-router-config@*", "@types/react-router-config@^5.0.6": +"@types/react-router-config@*": version "5.0.6" resolved "https://registry.npmjs.org/@types/react-router-config/-/react-router-config-5.0.6.tgz" integrity sha512-db1mx37a1EJDf1XeX8jJN7R3PZABmJQXR8r28yUjVMFSjkmnQo6X6pOEEmNl+Tp2gYQOGPdYbFIipBtdElZ3Yg== @@ -2501,6 +2665,15 @@ "@types/react" "*" "@types/react-router" "*" +"@types/react-router-config@^5.0.7": + version "5.0.11" + resolved "https://registry.yarnpkg.com/@types/react-router-config/-/react-router-config-5.0.11.tgz#2761a23acc7905a66a94419ee40294a65aaa483a" + integrity sha512-WmSAg7WgqW7m4x8Mt4N6ZyKz0BubSj/2tVUMsAHp+Yd2AMwcSbeFq9WympT19p5heCFmF97R9eD5uUR/t4HEqw== + dependencies: + "@types/history" "^4.7.11" + "@types/react" "*" + "@types/react-router" "^5.1.0" + "@types/react-router-dom@*": version "5.3.3" resolved "https://registry.npmjs.org/@types/react-router-dom/-/react-router-dom-5.3.3.tgz" @@ -2518,6 +2691,14 @@ "@types/history" "^4.7.11" "@types/react" "*" +"@types/react-router@^5.1.0": + version "5.1.20" + resolved "https://registry.yarnpkg.com/@types/react-router/-/react-router-5.1.20.tgz#88eccaa122a82405ef3efbcaaa5dcdd9f021387c" + integrity sha512-jGjmu/ZqS7FjSH6owMcD5qpq19+1RS9DeVRqfl1FeBMxTDQAGwlMWOcs52NDoXaNKyG3d1cYQFMs9rCrb88o9Q== + dependencies: + "@types/history" "^4.7.11" + "@types/react" "*" + "@types/react-transition-group@^4.4.9": version "4.4.9" resolved "https://registry.npmjs.org/@types/react-transition-group/-/react-transition-group-4.4.9.tgz" @@ -2573,18 +2754,40 @@ dependencies: "@types/node" "*" -"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2", "@types/unist@^2.0.3": +"@types/unist@*", "@types/unist@^2.0.0", "@types/unist@^2.0.2": version "2.0.6" resolved "https://registry.npmjs.org/@types/unist/-/unist-2.0.6.tgz" integrity sha512-PBjIUxZHOuj0R15/xuwJYjFi+KZdNFrehocChv4g5hu6aFroHue8m0lBP0POdK2nKzbw0cgV1mws8+V/JAcEkQ== -"@types/ws@^8.5.1": - version "8.5.3" - resolved "https://registry.npmjs.org/@types/ws/-/ws-8.5.3.tgz" - integrity sha512-6YOoWjruKj1uLf3INHH7D3qTXwFfEsg1kf3c0uDdSBJwfa/llkwIjrAGV7j7mVgGNbzTQ3HiHKKDXl6bJPD97w== +"@types/unist@^3.0.0": + version "3.0.3" + resolved "https://registry.yarnpkg.com/@types/unist/-/unist-3.0.3.tgz#acaab0f919ce69cce629c2d4ed2eb4adc1b6c20c" + integrity sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q== + +"@types/ws@^8.5.5": + version "8.5.13" + resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.13.tgz#6414c280875e2691d0d1e080b05addbf5cb91e20" + integrity sha512-osM/gWBTPKgHV8XkTunnegTRIsvF6owmf5w+JtAfOw472dptdm0dlGv4xCt6GwQRcC2XVOvvRE/0bAoQcL2QkA== dependencies: "@types/node" "*" +"@types/yargs-parser@*": + version "21.0.3" + resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.3.tgz#815e30b786d2e8f0dcd85fd5bcf5e1a04d008f15" + integrity sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ== + 
+"@types/yargs@^17.0.8": + version "17.0.33" + resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.33.tgz#8c32303da83eec050a84b3c7ae7b9f922d13e32d" + integrity sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA== + dependencies: + "@types/yargs-parser" "*" + +"@ungap/structured-clone@^1.0.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@ungap/structured-clone/-/structured-clone-1.2.0.tgz#756641adb587851b5ccb3e095daf27ae581c8406" + integrity sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ== + "@webassemblyjs/ast@1.12.1", "@webassemblyjs/ast@^1.12.1": version "1.12.1" resolved "https://registry.yarnpkg.com/@webassemblyjs/ast/-/ast-1.12.1.tgz#bb16a0e8b1914f979f45864c23819cc3e3f0d4bb" @@ -2729,11 +2932,6 @@ accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.8: mime-types "~2.1.34" negotiator "0.6.3" -acorn-import-attributes@^1.9.5: - version "1.9.5" - resolved "https://registry.yarnpkg.com/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz#7eb1557b1ba05ef18b5ed0ec67591bfab04688ef" - integrity sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ== - acorn-jsx@^5.0.0: version "5.3.2" resolved "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz" @@ -2768,7 +2966,12 @@ acorn@^8.0.0, acorn@^8.0.4, acorn@^8.5.0: resolved "https://registry.npmjs.org/acorn/-/acorn-8.8.2.tgz" integrity sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw== -acorn@^8.7.1, acorn@^8.8.2: +acorn@^8.14.0: + version "8.14.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.14.0.tgz#063e2c70cac5fb4f6467f0b11152e04c682795b0" + integrity sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA== + +acorn@^8.8.2: version "8.12.1" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.1.tgz#71616bdccbe25e27a54439e0046e89ca76df2248" integrity sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg== @@ -2798,14 +3001,14 @@ ajv-keywords@^3.4.1, ajv-keywords@^3.5.2: resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz" integrity sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ== -ajv-keywords@^5.0.0: +ajv-keywords@^5.0.0, ajv-keywords@^5.1.0: version "5.1.0" resolved "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz" integrity sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw== dependencies: fast-deep-equal "^3.1.3" -ajv@^6.12.2, ajv@^6.12.4, ajv@^6.12.5: +ajv@^6.12.2, ajv@^6.12.5: version "6.12.6" resolved "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz" integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== @@ -2825,40 +3028,77 @@ ajv@^8.0.0, ajv@^8.8.0: require-from-string "^2.0.2" uri-js "^4.2.2" -algoliasearch-helper@^3.10.0: - version "3.10.0" - resolved "https://registry.npmjs.org/algoliasearch-helper/-/algoliasearch-helper-3.10.0.tgz" - integrity sha512-4E4od8qWWDMVvQ3jaRX6Oks/k35ywD011wAA4LbYMMjOtaZV6VWaTjRr4iN2bdaXP2o1BP7SLFMBf3wvnHmd8Q== +ajv@^8.9.0: + version "8.17.1" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.17.1.tgz#37d9a5c776af6bc92d7f4f9510eba4c0a60d11a6" + integrity sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g== + dependencies: + fast-deep-equal "^3.1.3" + fast-uri "^3.0.1" + 
json-schema-traverse "^1.0.0" + require-from-string "^2.0.2" + +algoliasearch-helper@^3.13.3: + version "3.22.5" + resolved "https://registry.yarnpkg.com/algoliasearch-helper/-/algoliasearch-helper-3.22.5.tgz#2fcc26814e10a121a2c2526a1b05c754061c56c0" + integrity sha512-lWvhdnc+aKOKx8jyA3bsdEgHzm/sglC4cYdMG4xSQyRiPLJVJtH/IVYZG3Hp6PkTEhQqhyVYkeP9z2IlcHJsWw== dependencies: "@algolia/events" "^4.0.1" -algoliasearch@^4.0.0, algoliasearch@^4.13.1: - version "4.13.1" - resolved "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.13.1.tgz" - integrity sha512-dtHUSE0caWTCE7liE1xaL+19AFf6kWEcyn76uhcitWpntqvicFHXKFoZe5JJcv9whQOTRM6+B8qJz6sFj+rDJA== - dependencies: - "@algolia/cache-browser-local-storage" "4.13.1" - "@algolia/cache-common" "4.13.1" - "@algolia/cache-in-memory" "4.13.1" - "@algolia/client-account" "4.13.1" - "@algolia/client-analytics" "4.13.1" - "@algolia/client-common" "4.13.1" - "@algolia/client-personalization" "4.13.1" - "@algolia/client-search" "4.13.1" - "@algolia/logger-common" "4.13.1" - "@algolia/logger-console" "4.13.1" - "@algolia/requester-browser-xhr" "4.13.1" - "@algolia/requester-common" "4.13.1" - "@algolia/requester-node-http" "4.13.1" - "@algolia/transporter" "4.13.1" - -ansi-align@^3.0.0, ansi-align@^3.0.1: +algoliasearch@^4.18.0: + version "4.24.0" + resolved "https://registry.yarnpkg.com/algoliasearch/-/algoliasearch-4.24.0.tgz#b953b3e2309ef8f25da9de311b95b994ac918275" + integrity sha512-bf0QV/9jVejssFBmz2HQLxUadxk574t4iwjCKp5E7NBzwKkrDEhKPISIIjAU/p6K5qDx3qoeh4+26zWN1jmw3g== + dependencies: + "@algolia/cache-browser-local-storage" "4.24.0" + "@algolia/cache-common" "4.24.0" + "@algolia/cache-in-memory" "4.24.0" + "@algolia/client-account" "4.24.0" + "@algolia/client-analytics" "4.24.0" + "@algolia/client-common" "4.24.0" + "@algolia/client-personalization" "4.24.0" + "@algolia/client-search" "4.24.0" + "@algolia/logger-common" "4.24.0" + "@algolia/logger-console" "4.24.0" + "@algolia/recommend" "4.24.0" + "@algolia/requester-browser-xhr" "4.24.0" + "@algolia/requester-common" "4.24.0" + "@algolia/requester-node-http" "4.24.0" + "@algolia/transporter" "4.24.0" + +algoliasearch@^5.11.0: + version "5.12.0" + resolved "https://registry.yarnpkg.com/algoliasearch/-/algoliasearch-5.12.0.tgz#2e822a7916d691e55058ea7dba277d5110969dd0" + integrity sha512-psGBRYdGgik8I6m28iAB8xpubvjEt7UQU+w5MAJUA2324WHiGoHap5BPkkjB14rMaXeRts6pmOsrVIglGyOVwg== + dependencies: + "@algolia/client-abtesting" "5.12.0" + "@algolia/client-analytics" "5.12.0" + "@algolia/client-common" "5.12.0" + "@algolia/client-insights" "5.12.0" + "@algolia/client-personalization" "5.12.0" + "@algolia/client-query-suggestions" "5.12.0" + "@algolia/client-search" "5.12.0" + "@algolia/ingestion" "1.12.0" + "@algolia/monitoring" "1.12.0" + "@algolia/recommend" "5.12.0" + "@algolia/requester-browser-xhr" "5.12.0" + "@algolia/requester-fetch" "5.12.0" + "@algolia/requester-node-http" "5.12.0" + +ansi-align@^3.0.1: version "3.0.1" resolved "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz" integrity sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w== dependencies: string-width "^4.1.0" +ansi-escapes@^4.3.2: + version "4.3.2" + resolved "https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.2.tgz#6b2291d1db7d98b6521d5f1efa42d0f3a9feb65e" + integrity sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ== + dependencies: + type-fest "^0.21.3" + ansi-html-community@^0.0.8: version "0.0.8" resolved 
"https://registry.npmjs.org/ansi-html-community/-/ansi-html-community-0.0.8.tgz" @@ -2876,7 +3116,7 @@ ansi-regex@^6.0.1: ansi-styles@^3.2.1: version "3.2.1" - resolved "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz" + resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== dependencies: color-convert "^1.9.0" @@ -2938,11 +3178,6 @@ array-union@^2.1.0: resolved "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== -asap@~2.0.3: - version "2.0.6" - resolved "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz" - integrity sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA== - astring@^1.8.0: version "1.8.4" resolved "https://registry.npmjs.org/astring/-/astring-1.8.4.tgz" @@ -2965,24 +3200,17 @@ autocomplete.js@^0.37.0: dependencies: immediate "^3.2.3" -autoprefixer@^10.3.7, autoprefixer@^10.4.7: - version "10.4.7" - resolved "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.7.tgz" - integrity sha512-ypHju4Y2Oav95SipEcCcI5J7CGPuvz8oat7sUtYj3ClK44bldfvtvcxK6IEK++7rqB7YchDGzweZIBG+SD0ZAA== - dependencies: - browserslist "^4.20.3" - caniuse-lite "^1.0.30001335" - fraction.js "^4.2.0" - normalize-range "^0.1.2" - picocolors "^1.0.0" - postcss-value-parser "^4.2.0" - -axios@^0.25.0: - version "0.25.0" - resolved "https://registry.npmjs.org/axios/-/axios-0.25.0.tgz" - integrity sha512-cD8FOb0tRH3uuEe6+evtAbgJtfxr7ly3fQjYcMcuPlgkwVS9xboaVIpcDV+cYQe+yGykgwZCs1pzjntcGa6l5g== +autoprefixer@^10.4.14, autoprefixer@^10.4.19: + version "10.4.20" + resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.4.20.tgz#5caec14d43976ef42e32dcb4bd62878e96be5b3b" + integrity sha512-XY25y5xSv/wEoqzDyXXME4AFfkZI0P23z6Fs3YgymDnKJkCGOnkL0iTxCa85UTqaSgfcqyf3UA6+c7wUvx/16g== dependencies: - follow-redirects "^1.14.7" + browserslist "^4.23.3" + caniuse-lite "^1.0.30001646" + fraction.js "^4.3.7" + normalize-range "^0.1.2" + picocolors "^1.0.1" + postcss-value-parser "^4.2.0" axios@^1.5.0: version "1.7.2" @@ -2998,23 +3226,13 @@ b4a@^1.6.4: resolved "https://registry.yarnpkg.com/b4a/-/b4a-1.6.6.tgz#a4cc349a3851987c3c4ac2d7785c18744f6da9ba" integrity sha512-5Tk1HLk6b6ctmjIkAcU/Ujv/1WqiDl0F0JdRCR80VsOcUlHcu7pWeWRlOqQLHfDEsVx9YH/aif5AG4ehoCtTmg== -babel-loader@^8.2.5: - version "8.2.5" - resolved "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.5.tgz" - integrity sha512-OSiFfH89LrEMiWd4pLNqGz4CwJDtbs2ZVc+iGu2HrkRfPxId9F2anQj38IxWpmRfsUY0aBZYi1EFcd3mhtRMLQ== +babel-loader@^9.2.1: + version "9.2.1" + resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-9.2.1.tgz#04c7835db16c246dd19ba0914418f3937797587b" + integrity sha512-fqe8naHt46e0yIdkjUZYqddSXfej3AHajX+CSO5X7oy0EmPc6o5Xh+RClNoHjnieWz9AW4kZxW9yyFMhVB1QLA== dependencies: - find-cache-dir "^3.3.1" - loader-utils "^2.0.0" - make-dir "^3.1.0" - schema-utils "^2.6.5" - -babel-plugin-apply-mdx-type-prop@1.6.22: - version "1.6.22" - resolved "https://registry.npmjs.org/babel-plugin-apply-mdx-type-prop/-/babel-plugin-apply-mdx-type-prop-1.6.22.tgz" - integrity sha512-VefL+8o+F/DfK24lPZMtJctrCVOfgbqLAGZSkxwhazQv4VxPg3Za/i40fu22KR2m8eEda+IfSOlPLUSIiLcnCQ== - dependencies: - "@babel/helper-plugin-utils" "7.10.4" - "@mdx-js/util" "1.6.22" + find-cache-dir "^4.0.0" + schema-utils "^4.0.0" 
babel-plugin-dynamic-import-node@^2.3.3: version "2.3.3" @@ -3023,45 +3241,38 @@ babel-plugin-dynamic-import-node@^2.3.3: dependencies: object.assign "^4.1.0" -babel-plugin-extract-import-names@1.6.22: - version "1.6.22" - resolved "https://registry.npmjs.org/babel-plugin-extract-import-names/-/babel-plugin-extract-import-names-1.6.22.tgz" - integrity sha512-yJ9BsJaISua7d8zNT7oRG1ZLBJCIdZ4PZqmH8qa9N5AK01ifk3fnkc98AXhtzE7UkfCsEumvoQWgoYLhOnJ7jQ== - dependencies: - "@babel/helper-plugin-utils" "7.10.4" - babel-plugin-macros@^3.1.0: version "3.1.0" - resolved "https://registry.npmjs.org/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/babel-plugin-macros/-/babel-plugin-macros-3.1.0.tgz#9ef6dc74deb934b4db344dc973ee851d148c50c1" integrity sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg== dependencies: "@babel/runtime" "^7.12.5" cosmiconfig "^7.0.0" resolve "^1.19.0" -babel-plugin-polyfill-corejs2@^0.3.1: - version "0.3.1" - resolved "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.3.1.tgz" - integrity sha512-v7/T6EQcNfVLfcN2X8Lulb7DjprieyLWJK/zOWH5DUYcAgex9sP3h25Q+DLsX9TloXe3y1O8l2q2Jv9q8UVB9w== +babel-plugin-polyfill-corejs2@^0.4.10: + version "0.4.11" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.11.tgz#30320dfe3ffe1a336c15afdcdafd6fd615b25e33" + integrity sha512-sMEJ27L0gRHShOh5G54uAAPaiCOygY/5ratXuiyb2G46FmlSpc9eFCzYVyDiPxfNbwzA7mYahmjQc5q+CZQ09Q== dependencies: - "@babel/compat-data" "^7.13.11" - "@babel/helper-define-polyfill-provider" "^0.3.1" - semver "^6.1.1" + "@babel/compat-data" "^7.22.6" + "@babel/helper-define-polyfill-provider" "^0.6.2" + semver "^6.3.1" -babel-plugin-polyfill-corejs3@^0.5.2: - version "0.5.2" - resolved "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.5.2.tgz" - integrity sha512-G3uJih0XWiID451fpeFaYGVuxHEjzKTHtc9uGFEjR6hHrvNzeS/PX+LLLcetJcytsB5m4j+K3o/EpXJNb/5IEQ== +babel-plugin-polyfill-corejs3@^0.10.6: + version "0.10.6" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.10.6.tgz#2deda57caef50f59c525aeb4964d3b2f867710c7" + integrity sha512-b37+KR2i/khY5sKmWNVQAnitvquQbNdWy6lJdsr0kmquCKEEUgMKK4SboVM3HtfnZilfjr4MMQ7vY58FVWDtIA== dependencies: - "@babel/helper-define-polyfill-provider" "^0.3.1" - core-js-compat "^3.21.0" + "@babel/helper-define-polyfill-provider" "^0.6.2" + core-js-compat "^3.38.0" -babel-plugin-polyfill-regenerator@^0.3.1: - version "0.3.1" - resolved "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.3.1.tgz" - integrity sha512-Y2B06tvgHYt1x0yz17jGkGeeMr5FeKUu+ASJ+N6nB5lQ8Dapfg42i0OVrf8PNGJ3zKL4A23snMi1IRwrqqND7A== +babel-plugin-polyfill-regenerator@^0.6.1: + version "0.6.2" + resolved "https://registry.yarnpkg.com/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.2.tgz#addc47e240edd1da1058ebda03021f382bba785e" + integrity sha512-2R25rQZWP63nGwaAswvDazbPXfrM3HwVoBXK6HcqeKrSrL/JqcC/rDcf95l4r7LXLyxDXc8uQDa064GubtCABg== dependencies: - "@babel/helper-define-polyfill-provider" "^0.3.1" + "@babel/helper-define-polyfill-provider" "^0.6.2" bail@^1.0.0: version "1.0.5" @@ -3111,11 +3322,6 @@ bare-stream@^2.0.0: dependencies: streamx "^2.18.0" -base16@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/base16/-/base16-1.0.0.tgz" - integrity 
sha512-pNdYkNPiJUnEhnfXV56+sQy8+AaPcG3POZAUnwr4EeqCUZFz4u2PePbo3e5Gj4ziYPCWGUZT9RHisvJKnwFuBQ== - base64-js@^1.3.1: version "1.5.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" @@ -3192,20 +3398,6 @@ boolbase@^1.0.0: resolved "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz" integrity sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww== -boxen@^5.0.0: - version "5.1.2" - resolved "https://registry.npmjs.org/boxen/-/boxen-5.1.2.tgz" - integrity sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ== - dependencies: - ansi-align "^3.0.0" - camelcase "^6.2.0" - chalk "^4.1.0" - cli-boxes "^2.2.1" - string-width "^4.2.2" - type-fest "^0.20.2" - widest-line "^3.1.0" - wrap-ansi "^7.0.0" - boxen@^6.2.1: version "6.2.1" resolved "https://registry.npmjs.org/boxen/-/boxen-6.2.1.tgz" @@ -3256,7 +3448,7 @@ braces@~3.0.2: dependencies: fill-range "^7.0.1" -browserslist@^4.0.0, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4.20.3, browserslist@^4.21.1, browserslist@^4.21.3: +browserslist@^4.0.0, browserslist@^4.18.1: version "4.21.4" resolved "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz" integrity sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw== @@ -3266,15 +3458,15 @@ browserslist@^4.0.0, browserslist@^4.16.6, browserslist@^4.18.1, browserslist@^4 node-releases "^2.0.6" update-browserslist-db "^1.0.9" -browserslist@^4.21.10: - version "4.23.3" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.23.3.tgz#debb029d3c93ebc97ffbc8d9cbb03403e227c800" - integrity sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA== +browserslist@^4.23.0, browserslist@^4.23.3, browserslist@^4.24.0, browserslist@^4.24.2: + version "4.24.2" + resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.24.2.tgz#f5845bc91069dbd55ee89faf9822e1d885d16580" + integrity sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg== dependencies: - caniuse-lite "^1.0.30001646" - electron-to-chromium "^1.5.4" + caniuse-lite "^1.0.30001669" + electron-to-chromium "^1.5.41" node-releases "^2.0.18" - update-browserslist-db "^1.1.0" + update-browserslist-db "^1.1.1" buffer-from@^1.0.0: version "1.1.2" @@ -3325,19 +3517,6 @@ cacheable-request@^10.2.8: normalize-url "^8.0.0" responselike "^3.0.0" -cacheable-request@^6.0.0: - version "6.1.0" - resolved "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz" - integrity sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg== - dependencies: - clone-response "^1.0.2" - get-stream "^5.1.0" - http-cache-semantics "^4.0.0" - keyv "^3.0.0" - lowercase-keys "^2.0.0" - normalize-url "^4.1.0" - responselike "^1.0.2" - call-bind@^1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz" @@ -3359,7 +3538,7 @@ call-bind@^1.0.7: callsites@^3.0.0: version "3.1.0" - resolved "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== camel-case@^4.1.2: @@ -3370,7 +3549,7 @@ camel-case@^4.1.2: pascal-case "^3.1.2" tslib "^2.0.3" -camelcase-css@2.0.1, 
camelcase-css@^2.0.1: +camelcase-css@^2.0.1: version "2.0.1" resolved "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz" integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== @@ -3395,7 +3574,7 @@ caniuse-api@^3.0.0: lodash.memoize "^4.1.2" lodash.uniq "^4.5.0" -caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001335, caniuse-lite@^1.0.30001400: +caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001400: version "1.0.30001445" resolved "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001445.tgz" integrity sha512-8sdQIdMztYmzfTMO6KfLny878Ln9c2M0fc7EH60IjlP4Dc4PiCy7K2Vl3ITmWgOyPgVQKa5x+UP/KqFsxj4mBg== @@ -3405,26 +3584,26 @@ caniuse-lite@^1.0.30001646: resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001653.tgz#b8af452f8f33b1c77f122780a4aecebea0caca56" integrity sha512-XGWQVB8wFQ2+9NZwZ10GxTYC5hk0Fa+q8cSkr0tgvMhYhMHP/QC+WTgrePMDBWiWc/pV+1ik82Al20XOK25Gcw== -ccount@^1.0.0: - version "1.1.0" - resolved "https://registry.npmjs.org/ccount/-/ccount-1.1.0.tgz" - integrity sha512-vlNK021QdI7PNeiUh/lKkC/mNHHfV0m/Ad5JoI0TYtlBnJAslM/JIkm/tGC88bkLIwO6OQ5uV6ztS6kVAtCDlg== +caniuse-lite@^1.0.30001669: + version "1.0.30001677" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001677.tgz#27c2e2c637e007cfa864a16f7dfe7cde66b38b5f" + integrity sha512-fmfjsOlJUpMWu+mAAtZZZHz7UEwsUxIIvu1TJfO1HqFQvB/B+ii0xr9B5HpbZY/mC4XZ8SvjHJqtAY6pDPQEog== ccount@^2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz" integrity sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg== -chalk@^2.0.0, chalk@^2.4.2: +chalk@^2.4.2: version "2.4.2" - resolved "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== dependencies: ansi-styles "^3.2.1" escape-string-regexp "^1.0.5" supports-color "^5.3.0" -chalk@^4.1.0, chalk@^4.1.2: +chalk@^4.0.0, chalk@^4.1.0, chalk@^4.1.2: version "4.1.2" resolved "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -3437,36 +3616,26 @@ chalk@^5.0.0, chalk@^5.0.1, chalk@^5.2.0, chalk@^5.3.0: resolved "https://registry.yarnpkg.com/chalk/-/chalk-5.3.0.tgz#67c20a7ebef70e7f3970a01f90fa210cb6860385" integrity sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w== +char-regex@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/char-regex/-/char-regex-1.0.2.tgz#d744358226217f981ed58f479b1d6bcc29545dcf" + integrity sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw== + character-entities-html4@^2.0.0: version "2.1.0" resolved "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz" integrity sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA== -character-entities-legacy@^1.0.0: - version "1.1.4" - resolved "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-1.1.4.tgz" - integrity sha512-3Xnr+7ZFS1uxeiUDvV02wQ+QDbc55o97tIV5zHScSPJpcLm/r0DFPcoY3tYRp+VZukxuMeKgXYmsXQHO05zQeA== - character-entities-legacy@^3.0.0: version "3.0.0" resolved 
"https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz" integrity sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ== -character-entities@^1.0.0: - version "1.2.4" - resolved "https://registry.npmjs.org/character-entities/-/character-entities-1.2.4.tgz" - integrity sha512-iBMyeEHxfVnIakwOuDXpVkc54HijNgCyQB2w0VfGQThle6NXn50zU6V/u+LDhxHcDUPojn6Kpga3PTAD8W1bQw== - character-entities@^2.0.0: version "2.0.2" resolved "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz" integrity sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ== -character-reference-invalid@^1.0.0: - version "1.1.4" - resolved "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-1.1.4.tgz" - integrity sha512-mKKUkUbhPpQlCOfIuZkvSEgktjPFIsZKRRbC6KWVEMvlzblj3i3asQv5ODsrwt0N3pHAEvjP8KTQPHkp0+6jOg== - character-reference-invalid@^2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz" @@ -3484,7 +3653,7 @@ cheerio-select@^2.1.0: domhandler "^5.0.3" domutils "^3.0.1" -cheerio@^1.0.0-rc.12: +cheerio@1.0.0-rc.12, cheerio@^1.0.0-rc.12: version "1.0.0-rc.12" resolved "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz" integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q== @@ -3522,38 +3691,35 @@ chrome-trace-event@^1.0.2: resolved "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz" integrity sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg== -ci-info@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz" - integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== - ci-info@^3.2.0: version "3.9.0" resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.9.0.tgz#4279a62028a7b1f262f3473fc9605f5e218c59b4" integrity sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ== -classnames@^2.2.6, classnames@^2.3.2: - version "2.3.2" - resolved "https://registry.npmjs.org/classnames/-/classnames-2.3.2.tgz" - integrity sha512-CSbhY4cFEJRe6/GQzIk5qXZ4Jeg5pcsP7b5peFSDpffpe1cqjASH/n9UTjBwOp6XpMSTwQ8Za2K5V02ueA7Tmw== +classnames@^2.5.1: + version "2.5.1" + resolved "https://registry.yarnpkg.com/classnames/-/classnames-2.5.1.tgz#ba774c614be0f016da105c858e7159eae8e7687b" + integrity sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow== -clean-css@^5.2.2, clean-css@^5.3.0: +clean-css@^5.2.2: version "5.3.0" resolved "https://registry.npmjs.org/clean-css/-/clean-css-5.3.0.tgz" integrity sha512-YYuuxv4H/iNb1Z/5IbMRoxgrzjWGhOEFfd+groZ5dMCVkpENiMZmwspdrzBo9286JjM1gZJPAyL7ZIdzuvu2AQ== dependencies: source-map "~0.6.0" +clean-css@^5.3.2, clean-css@~5.3.2: + version "5.3.3" + resolved "https://registry.yarnpkg.com/clean-css/-/clean-css-5.3.3.tgz#b330653cd3bd6b75009cc25c714cae7b93351ccd" + integrity sha512-D5J+kHaVb/wKSFcyyV75uCn8fiY4sV38XJoe4CUyGQ+mOU/fMVYUdH1hJC+CJQ5uY3EnW27SbJYS4X8BiLrAFg== + dependencies: + source-map "~0.6.0" + clean-stack@^2.0.0: version "2.2.0" resolved "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz" integrity sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A== -cli-boxes@^2.2.1: - version "2.2.1" - resolved 
"https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz" - integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw== - cli-boxes@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/cli-boxes/-/cli-boxes-3.0.0.tgz" @@ -3571,10 +3737,10 @@ cli-spinners@^2.9.0: resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-2.9.2.tgz#1773a8f4b9c4d6ac31563df53b3fc1d79462fe41" integrity sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg== -cli-table3@^0.6.2: - version "0.6.2" - resolved "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.2.tgz" - integrity sha512-QyavHCaIC80cMivimWu4aWHilIpiDpfm3hGmqAmXVL1UsnbLuBSMd21hTX6VY4ZSDSM73ESLeF8TOYId3rBTbw== +cli-table3@^0.6.3: + version "0.6.5" + resolved "https://registry.yarnpkg.com/cli-table3/-/cli-table3-0.6.5.tgz#013b91351762739c16a9567c21a04632e449bf2f" + integrity sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ== dependencies: string-width "^4.2.0" optionalDependencies: @@ -3589,14 +3755,7 @@ clone-deep@^4.0.1: kind-of "^6.0.2" shallow-clone "^3.0.0" -clone-response@^1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz" - integrity sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q== - dependencies: - mimic-response "^1.0.0" - -clsx@^1.2.0, clsx@^1.2.1: +clsx@^1.2.1: version "1.2.1" resolved "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz" integrity sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg== @@ -3606,14 +3765,14 @@ clsx@^2.0.0: resolved "https://registry.npmjs.org/clsx/-/clsx-2.0.0.tgz" integrity sha512-rQ1+kcj+ttHG0MKVGBUXwayCCF1oh39BF5COIpRzuCEv8Mwjv0XucrI2ExNTOn9IlLifGClWQcU9BrZORvtw6Q== -collapse-white-space@^1.0.2: - version "1.0.6" - resolved "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-1.0.6.tgz" - integrity sha512-jEovNnrhMuqyCcjfEJA56v0Xq8SkIoPKDyaHahwo3POf4qcSXqMYuwNcOTzp74vTsR9Tn08z4MxWqAhcekogkQ== +collapse-white-space@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/collapse-white-space/-/collapse-white-space-2.1.0.tgz#640257174f9f42c740b40f3b55ee752924feefca" + integrity sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw== color-convert@^1.9.0: version "1.9.3" - resolved "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz" + resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== dependencies: color-name "1.1.3" @@ -3627,7 +3786,7 @@ color-convert@^2.0.1: color-name@1.1.3: version "1.1.3" - resolved "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz" + resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== color-name@^1.0.0, color-name@~1.1.4: @@ -3656,10 +3815,10 @@ color@^4.0.1, color@^4.2.3: color-convert "^2.0.1" color-string "^1.9.0" -colord@^2.9.1: - version "2.9.2" - resolved "https://registry.npmjs.org/colord/-/colord-2.9.2.tgz" - integrity sha512-Uqbg+J445nc1TKn4FoDPS6ZZqAvEDnwrH42yo8B40JSOgSLxMZ/gt3h4nmCtPLQeXhjJJkqBx7SCY35WnIixaQ== +colord@^2.9.3: + version "2.9.3" + 
resolved "https://registry.yarnpkg.com/colord/-/colord-2.9.3.tgz#4f8ce919de456f1d5c1c368c307fe20f3e59fb43" + integrity sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw== colorette@^2.0.10: version "2.0.19" @@ -3688,6 +3847,11 @@ comma-separated-tokens@^2.0.0: resolved "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz" integrity sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg== +commander@^10.0.0: + version "10.0.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-10.0.1.tgz#881ee46b4f77d1c1dccc5823433aa39b022cbe06" + integrity sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug== + commander@^11.0.0: version "11.1.0" resolved "https://registry.yarnpkg.com/commander/-/commander-11.1.0.tgz#62fdce76006a68e5c1ab3314dc92e800eb83d906" @@ -3713,10 +3877,10 @@ commander@^8.0.0, commander@^8.3.0: resolved "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz" integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== -commondir@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz" - integrity sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg== +common-path-prefix@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/common-path-prefix/-/common-path-prefix-3.0.0.tgz#7d007a7e07c58c4b4d5f433131a19141b29f11e0" + integrity sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w== compressible@~2.0.16: version "2.0.18" @@ -3751,18 +3915,6 @@ config-chain@^1.1.11: ini "^1.3.4" proto-list "~1.2.1" -configstore@^5.0.1: - version "5.0.1" - resolved "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz" - integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA== - dependencies: - dot-prop "^5.2.0" - graceful-fs "^4.1.2" - make-dir "^3.0.0" - unique-string "^2.0.0" - write-file-atomic "^3.0.0" - xdg-basedir "^4.0.0" - configstore@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/configstore/-/configstore-6.0.0.tgz#49eca2ebc80983f77e09394a1a56e0aca8235566" @@ -3779,10 +3931,10 @@ connect-history-api-fallback@^2.0.0: resolved "https://registry.npmjs.org/connect-history-api-fallback/-/connect-history-api-fallback-2.0.0.tgz" integrity sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA== -consola@^2.15.3: - version "2.15.3" - resolved "https://registry.npmjs.org/consola/-/consola-2.15.3.tgz" - integrity sha512-9vAdYbHj6x2fLKC4+oPH0kFzY/orMZyG2Aj+kNylHxKGJ/Ed4dpNyAQYwJOdqO4zdM7XpVHmyejQDcQHrnuXbw== +consola@^3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/consola/-/consola-3.2.3.tgz#0741857aa88cfa0d6fd53f1cff0375136e98502f" + integrity sha512-I5qxpzLv+sJhTVEoLYNcTW+bThDCPsit0vLNKShZx6rLtpilNpmmeTPaeqJb9ZE9dV3DGaeby6Vuhrw38WjeyQ== console-control-strings@^1.0.0: version "1.1.0" @@ -3811,12 +3963,15 @@ content-type@~1.0.5: resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== -convert-source-map@^1.5.0, convert-source-map@^1.7.0: - version "1.8.0" - resolved "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.8.0.tgz" - integrity 
sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== - dependencies: - safe-buffer "~5.1.1" +convert-source-map@^1.5.0: + version "1.9.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" + integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== + +convert-source-map@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" + integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== convict@^6.2.4: version "6.2.4" @@ -3836,10 +3991,10 @@ cookie@0.7.1: resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.1.tgz#2f73c42142d5d5cf71310a74fc4ae61670e5dbc9" integrity sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w== -copy-text-to-clipboard@^3.0.1: - version "3.0.1" - resolved "https://registry.npmjs.org/copy-text-to-clipboard/-/copy-text-to-clipboard-3.0.1.tgz" - integrity sha512-rvVsHrpFcL4F2P8ihsoLdFHmd404+CMg71S756oRSeQgqk51U3kicGdnvfkrxva0xXH92SjGS62B0XIJsbh+9Q== +copy-text-to-clipboard@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/copy-text-to-clipboard/-/copy-text-to-clipboard-3.2.0.tgz#0202b2d9bdae30a49a53f898626dcc3b49ad960b" + integrity sha512-RnJFp1XR/LOBDckxTib5Qjr/PMfkatD0MUCQgdpqS8MdKiNUzBjAQBEN6oUy+jW7LI93BBG3DtMB2KOOKpGs2Q== copy-webpack-plugin@^11.0.0: version "11.0.0" @@ -3853,23 +4008,22 @@ copy-webpack-plugin@^11.0.0: schema-utils "^4.0.0" serialize-javascript "^6.0.0" -core-js-compat@^3.21.0, core-js-compat@^3.22.1: - version "3.23.4" - resolved "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.23.4.tgz" - integrity sha512-RkSRPe+JYEoflcsuxJWaiMPhnZoFS51FcIxm53k4KzhISCBTmaGlto9dTIrYuk0hnJc3G6pKufAKepHnBq6B6Q== +core-js-compat@^3.38.0, core-js-compat@^3.38.1: + version "3.39.0" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.39.0.tgz#b12dccb495f2601dc860bdbe7b4e3ffa8ba63f61" + integrity sha512-VgEUx3VwlExr5no0tXlBt+silBvhTryPwCXRI2Id1PN8WTKu7MreethvddqOubrYxkFdv/RnYrqlv1sFNAUelw== dependencies: - browserslist "^4.21.1" - semver "7.0.0" + browserslist "^4.24.2" -core-js-pure@^3.20.2: - version "3.23.4" - resolved "https://registry.npmjs.org/core-js-pure/-/core-js-pure-3.23.4.tgz" - integrity sha512-lizxkcgj3XDmi7TUBFe+bQ1vNpD5E4t76BrBWI3HdUxdw/Mq1VF4CkiHzIKyieECKtcODK2asJttoofEeUKICQ== +core-js-pure@^3.30.2: + version "3.39.0" + resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.39.0.tgz#aa0d54d70a15bdc13e7c853db87c10abc30d68f3" + integrity sha512-7fEcWwKI4rJinnK+wLTezeg2smbFFdSBP6E2kQZNbnzM2s1rpKQ6aaRteZSSg7FLU3P0HGGVo/gbpfanU36urg== -core-js@^3.23.3: - version "3.23.4" - resolved "https://registry.npmjs.org/core-js/-/core-js-3.23.4.tgz" - integrity sha512-vjsKqRc1RyAJC3Ye2kYqgfdThb3zYnx9CrqoCcjMOENMtQPC7ZViBvlDxwYU/2z2NI/IPuiXw5mT4hWhddqjzQ== +core-js@^3.31.1: + version "3.39.0" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.39.0.tgz#57f7647f4d2d030c32a72ea23a0555b2eaa30f83" + integrity sha512-raM0ew0/jJUqkJ0E6e8UDtl+y/7ktFivgWvqw8dNSQeNWoSDLvQ1H/RN3aPXB9tBd4/FhyR4RDPGhsNIMsAn7g== core-util-is@~1.0.0: version "1.0.3" @@ -3887,7 +4041,18 @@ cosmiconfig@^6.0.0: path-type "^4.0.0" yaml "^1.7.2" -cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: +cosmiconfig@^7.0.0: + version "7.1.0" + resolved 
"https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-7.1.0.tgz#1443b9afa596b670082ea46cbd8f6a62b84635f6" + integrity sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA== + dependencies: + "@types/parse-json" "^4.0.0" + import-fresh "^3.2.1" + parse-json "^5.0.0" + path-type "^4.0.0" + yaml "^1.10.0" + +cosmiconfig@^7.0.1: version "7.0.1" resolved "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-7.0.1.tgz" integrity sha512-a1YWNUV2HwGimB7dU2s1wUMurNKjpx60HxBB6xUM8Re+2s1g1IIfJvFR0/iCF+XHdE0GMTKTuLR32UQff4TEyQ== @@ -3898,12 +4063,15 @@ cosmiconfig@^7.0.0, cosmiconfig@^7.0.1: path-type "^4.0.0" yaml "^1.10.0" -cross-fetch@^3.1.5: - version "3.1.5" - resolved "https://registry.npmjs.org/cross-fetch/-/cross-fetch-3.1.5.tgz" - integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== +cosmiconfig@^8.1.3, cosmiconfig@^8.3.5: + version "8.3.6" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-8.3.6.tgz#060a2b871d66dba6c8538ea1118ba1ac16f5fae3" + integrity sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA== dependencies: - node-fetch "2.6.7" + import-fresh "^3.3.0" + js-yaml "^4.1.0" + parse-json "^5.2.0" + path-type "^4.0.0" cross-spawn@^7.0.3: version "7.0.3" @@ -3914,11 +4082,6 @@ cross-spawn@^7.0.3: shebang-command "^2.0.0" which "^2.0.1" -crypto-random-string@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz" - integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA== - crypto-random-string@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-4.0.0.tgz#5a3cc53d7dd86183df5da0312816ceeeb5bb1fc2" @@ -3931,36 +4094,36 @@ css-color-names@^0.0.4: resolved "https://registry.npmjs.org/css-color-names/-/css-color-names-0.0.4.tgz" integrity sha512-zj5D7X1U2h2zsXOAM8EyUREBnnts6H+Jm+d1M2DbiQQcUtnqgQsMrdo8JW9R80YFUmIdBZeMu5wvYM7hcgWP/Q== -css-declaration-sorter@^6.3.0: - version "6.3.0" - resolved "https://registry.npmjs.org/css-declaration-sorter/-/css-declaration-sorter-6.3.0.tgz" - integrity sha512-OGT677UGHJTAVMRhPO+HJ4oKln3wkBTwtDFH0ojbqm+MJm6xuDMHp2nkhh/ThaBqq20IbraBQSWKfSLNHQO9Og== +css-declaration-sorter@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/css-declaration-sorter/-/css-declaration-sorter-7.2.0.tgz#6dec1c9523bc4a643e088aab8f09e67a54961024" + integrity sha512-h70rUM+3PNFuaBDTLe8wF/cdWu+dOZmb7pJt8Z2sedYbAcQVQV/tEchueg3GWxwqS0cxtbxmaHEdkNACqcvsow== -css-loader@^6.7.1: - version "6.7.1" - resolved "https://registry.npmjs.org/css-loader/-/css-loader-6.7.1.tgz" - integrity sha512-yB5CNFa14MbPJcomwNh3wLThtkZgcNyI2bNMRt8iE5Z8Vwl7f8vQXFAzn2HDOJvtDq2NTZBUGMSUNNyrv3/+cw== +css-loader@^6.8.1: + version "6.11.0" + resolved "https://registry.yarnpkg.com/css-loader/-/css-loader-6.11.0.tgz#33bae3bf6363d0a7c2cf9031c96c744ff54d85ba" + integrity sha512-CTJ+AEQJjq5NzLga5pE39qdiSV56F8ywCIsqNIRF0r7BDgWsN25aazToqAFg7ZrtA/U016xudB3ffgweORxX7g== dependencies: icss-utils "^5.1.0" - postcss "^8.4.7" - postcss-modules-extract-imports "^3.0.0" - postcss-modules-local-by-default "^4.0.0" - postcss-modules-scope "^3.0.0" + postcss "^8.4.33" + postcss-modules-extract-imports "^3.1.0" + postcss-modules-local-by-default "^4.0.5" + postcss-modules-scope "^3.2.0" postcss-modules-values "^4.0.0" postcss-value-parser "^4.2.0" - semver "^7.3.5" + semver "^7.5.4" 
-css-minimizer-webpack-plugin@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-4.0.0.tgz" - integrity sha512-7ZXXRzRHvofv3Uac5Y+RkWRNo0ZMlcg8e9/OtrqUYmwDWJo+qs67GvdeFrXLsFb7czKNwjQhPkM0avlIYl+1nA== - dependencies: - cssnano "^5.1.8" - jest-worker "^27.5.1" - postcss "^8.4.13" - schema-utils "^4.0.0" - serialize-javascript "^6.0.0" - source-map "^0.6.1" +css-minimizer-webpack-plugin@^5.0.1: + version "5.0.1" + resolved "https://registry.yarnpkg.com/css-minimizer-webpack-plugin/-/css-minimizer-webpack-plugin-5.0.1.tgz#33effe662edb1a0bf08ad633c32fa75d0f7ec565" + integrity sha512-3caImjKFQkS+ws1TGcFn0V1HyDJFq1Euy589JlD6/3rV2kj+w7r5G9WDMgSHvpvXHNZ2calVypZWuEDQd9wfLg== + dependencies: + "@jridgewell/trace-mapping" "^0.3.18" + cssnano "^6.0.1" + jest-worker "^29.4.3" + postcss "^8.4.24" + schema-utils "^4.0.1" + serialize-javascript "^6.0.1" css-select@^4.1.3: version "4.3.0" @@ -3989,13 +4152,21 @@ css-selector-parser@^1.0.0: resolved "https://registry.npmjs.org/css-selector-parser/-/css-selector-parser-1.4.1.tgz" integrity sha512-HYPSb7y/Z7BNDCOrakL4raGO2zltZkbeXyAd6Tg9obzix6QhzxCotdBl6VT0Dv4vZfJGVz3WL/xaEI9Ly3ul0g== -css-tree@^1.1.2, css-tree@^1.1.3: - version "1.1.3" - resolved "https://registry.npmjs.org/css-tree/-/css-tree-1.1.3.tgz" - integrity sha512-tRpdppF7TRazZrjJ6v3stzv93qxRcSsFmW6cX0Zm2NVKpxE1WV1HblnghVv9TreireHkqI/VDEsfolRF1p6y7Q== +css-tree@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-2.3.1.tgz#10264ce1e5442e8572fc82fbe490644ff54b5c20" + integrity sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw== + dependencies: + mdn-data "2.0.30" + source-map-js "^1.0.1" + +css-tree@~2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-2.2.1.tgz#36115d382d60afd271e377f9c5f67d02bd48c032" + integrity sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA== dependencies: - mdn-data "2.0.14" - source-map "^0.6.1" + mdn-data "2.0.28" + source-map-js "^1.0.1" css-unit-converter@^1.1.1: version "1.1.2" @@ -4012,79 +4183,90 @@ cssesc@^3.0.0: resolved "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz" integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== -cssnano-preset-advanced@^5.3.8: - version "5.3.8" - resolved "https://registry.npmjs.org/cssnano-preset-advanced/-/cssnano-preset-advanced-5.3.8.tgz" - integrity sha512-xUlLLnEB1LjpEik+zgRNlk8Y/koBPPtONZjp7JKbXigeAmCrFvq9H0pXW5jJV45bQWAlmJ0sKy+IMr0XxLYQZg== - dependencies: - autoprefixer "^10.3.7" - cssnano-preset-default "^5.2.12" - postcss-discard-unused "^5.1.0" - postcss-merge-idents "^5.1.1" - postcss-reduce-idents "^5.2.0" - postcss-zindex "^5.1.0" - -cssnano-preset-default@^5.2.12: - version "5.2.12" - resolved "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-5.2.12.tgz" - integrity sha512-OyCBTZi+PXgylz9HAA5kHyoYhfGcYdwFmyaJzWnzxuGRtnMw/kR6ilW9XzlzlRAtB6PLT/r+prYgkef7hngFew== - dependencies: - css-declaration-sorter "^6.3.0" - cssnano-utils "^3.1.0" - postcss-calc "^8.2.3" - postcss-colormin "^5.3.0" - postcss-convert-values "^5.1.2" - postcss-discard-comments "^5.1.2" - postcss-discard-duplicates "^5.1.0" - postcss-discard-empty "^5.1.1" - postcss-discard-overridden "^5.1.0" - postcss-merge-longhand "^5.1.6" - postcss-merge-rules "^5.1.2" - postcss-minify-font-values "^5.1.0" - postcss-minify-gradients "^5.1.1" - 
postcss-minify-params "^5.1.3" - postcss-minify-selectors "^5.2.1" - postcss-normalize-charset "^5.1.0" - postcss-normalize-display-values "^5.1.0" - postcss-normalize-positions "^5.1.1" - postcss-normalize-repeat-style "^5.1.1" - postcss-normalize-string "^5.1.0" - postcss-normalize-timing-functions "^5.1.0" - postcss-normalize-unicode "^5.1.0" - postcss-normalize-url "^5.1.0" - postcss-normalize-whitespace "^5.1.1" - postcss-ordered-values "^5.1.3" - postcss-reduce-initial "^5.1.0" - postcss-reduce-transforms "^5.1.0" - postcss-svgo "^5.1.0" - postcss-unique-selectors "^5.1.1" - -cssnano-utils@^3.1.0: - version "3.1.0" - resolved "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-3.1.0.tgz" - integrity sha512-JQNR19/YZhz4psLX/rQ9M83e3z2Wf/HdJbryzte4a3NSuafyp9w/I4U+hx5C2S9g41qlstH7DEWnZaaj83OuEA== +cssnano-preset-advanced@^6.1.2: + version "6.1.2" + resolved "https://registry.yarnpkg.com/cssnano-preset-advanced/-/cssnano-preset-advanced-6.1.2.tgz#82b090872b8f98c471f681d541c735acf8b94d3f" + integrity sha512-Nhao7eD8ph2DoHolEzQs5CfRpiEP0xa1HBdnFZ82kvqdmbwVBUr2r1QuQ4t1pi+D1ZpqpcO4T+wy/7RxzJ/WPQ== + dependencies: + autoprefixer "^10.4.19" + browserslist "^4.23.0" + cssnano-preset-default "^6.1.2" + postcss-discard-unused "^6.0.5" + postcss-merge-idents "^6.0.3" + postcss-reduce-idents "^6.0.3" + postcss-zindex "^6.0.2" + +cssnano-preset-default@^6.1.2: + version "6.1.2" + resolved "https://registry.yarnpkg.com/cssnano-preset-default/-/cssnano-preset-default-6.1.2.tgz#adf4b89b975aa775f2750c89dbaf199bbd9da35e" + integrity sha512-1C0C+eNaeN8OcHQa193aRgYexyJtU8XwbdieEjClw+J9d94E41LwT6ivKH0WT+fYwYWB0Zp3I3IZ7tI/BbUbrg== + dependencies: + browserslist "^4.23.0" + css-declaration-sorter "^7.2.0" + cssnano-utils "^4.0.2" + postcss-calc "^9.0.1" + postcss-colormin "^6.1.0" + postcss-convert-values "^6.1.0" + postcss-discard-comments "^6.0.2" + postcss-discard-duplicates "^6.0.3" + postcss-discard-empty "^6.0.3" + postcss-discard-overridden "^6.0.2" + postcss-merge-longhand "^6.0.5" + postcss-merge-rules "^6.1.1" + postcss-minify-font-values "^6.1.0" + postcss-minify-gradients "^6.0.3" + postcss-minify-params "^6.1.0" + postcss-minify-selectors "^6.0.4" + postcss-normalize-charset "^6.0.2" + postcss-normalize-display-values "^6.0.2" + postcss-normalize-positions "^6.0.2" + postcss-normalize-repeat-style "^6.0.2" + postcss-normalize-string "^6.0.2" + postcss-normalize-timing-functions "^6.0.2" + postcss-normalize-unicode "^6.1.0" + postcss-normalize-url "^6.0.2" + postcss-normalize-whitespace "^6.0.2" + postcss-ordered-values "^6.0.2" + postcss-reduce-initial "^6.1.0" + postcss-reduce-transforms "^6.0.2" + postcss-svgo "^6.0.3" + postcss-unique-selectors "^6.0.4" + +cssnano-utils@^4.0.2: + version "4.0.2" + resolved "https://registry.yarnpkg.com/cssnano-utils/-/cssnano-utils-4.0.2.tgz#56f61c126cd0f11f2eef1596239d730d9fceff3c" + integrity sha512-ZR1jHg+wZ8o4c3zqf1SIUSTIvm/9mU343FMR6Obe/unskbvpGhZOo1J6d/r8D1pzkRQYuwbcH3hToOuoA2G7oQ== -cssnano@^5.1.12, cssnano@^5.1.8: - version "5.1.12" - resolved "https://registry.npmjs.org/cssnano/-/cssnano-5.1.12.tgz" - integrity sha512-TgvArbEZu0lk/dvg2ja+B7kYoD7BBCmn3+k58xD0qjrGHsFzXY/wKTo9M5egcUCabPol05e/PVoIu79s2JN4WQ== +cssnano@^6.0.1, cssnano@^6.1.2: + version "6.1.2" + resolved "https://registry.yarnpkg.com/cssnano/-/cssnano-6.1.2.tgz#4bd19e505bd37ee7cf0dc902d3d869f6d79c66b8" + integrity sha512-rYk5UeX7VAM/u0lNqewCdasdtPK81CgX8wJFLEIXHbV2oldWRgJAsZrdhRXkV1NJzA2g850KiFm9mMU2HxNxMA== dependencies: - cssnano-preset-default "^5.2.12" - lilconfig "^2.0.3" - yaml 
"^1.10.2" + cssnano-preset-default "^6.1.2" + lilconfig "^3.1.1" -csso@^4.2.0: - version "4.2.0" - resolved "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz" - integrity sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA== +csso@^5.0.5: + version "5.0.5" + resolved "https://registry.yarnpkg.com/csso/-/csso-5.0.5.tgz#f9b7fe6cc6ac0b7d90781bb16d5e9874303e2ca6" + integrity sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ== dependencies: - css-tree "^1.1.2" + css-tree "~2.2.0" + +csstype@^3.0.2: + version "3.1.3" + resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.3.tgz#d80ff294d114fb0e6ac500fbf85b60137d7eff81" + integrity sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw== -csstype@^3.0.2, csstype@^3.1.2: +csstype@^3.1.2: version "3.1.2" resolved "https://registry.npmjs.org/csstype/-/csstype-3.1.2.tgz" integrity sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ== +debounce@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/debounce/-/debounce-1.2.1.tgz#38881d8f4166a5c5848020c11827b834bcb3e0a5" + integrity sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug== + debug@2.6.9, debug@^2.6.0: version "2.6.9" resolved "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz" @@ -4092,6 +4274,13 @@ debug@2.6.9, debug@^2.6.0: dependencies: ms "2.0.0" +debug@4: + version "4.3.7" + resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.7.tgz#87945b4151a011d76d95a198d7111c865c360a52" + integrity sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ== + dependencies: + ms "^2.1.3" + debug@^4.0.0, debug@^4.1.0, debug@^4.1.1: version "4.3.4" resolved "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz" @@ -4120,13 +4309,6 @@ decode-named-character-reference@^1.0.0: dependencies: character-entities "^2.0.0" -decompress-response@^3.3.0: - version "3.3.0" - resolved "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz" - integrity sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA== - dependencies: - mimic-response "^1.0.0" - decompress-response@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc" @@ -4144,6 +4326,11 @@ deepmerge@^4.2.2: resolved "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz" integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== +deepmerge@^4.3.1: + version "4.3.1" + resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a" + integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A== + default-gateway@^6.0.3: version "6.0.3" resolved "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz" @@ -4151,11 +4338,6 @@ default-gateway@^6.0.3: dependencies: execa "^5.0.0" -defer-to-connect@^1.0.1: - version "1.1.3" - resolved "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz" - integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== - defer-to-connect@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz#8016bdb4143e4632b77a3449c6236277de520587" 
@@ -4227,13 +4409,6 @@ destroy@1.2.0: resolved "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz" integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== -detab@2.0.4: - version "2.0.4" - resolved "https://registry.npmjs.org/detab/-/detab-2.0.4.tgz" - integrity sha512-8zdsQA5bIkoRECvCrNKPla84lyoR7DSAyf7p0YgXzBO9PDJx8KntPUay7NS6yp+KdxdVtiE5SpHKtbp2ZQyA9g== - dependencies: - repeat-string "^1.5.4" - detect-libc@^2.0.0, detect-libc@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.3.tgz#f0cd503b40f9939b894697d19ad50895e30cf700" @@ -4252,13 +4427,13 @@ detect-port-alt@^1.1.6: address "^1.0.1" debug "^2.6.0" -detect-port@^1.3.0: - version "1.3.0" - resolved "https://registry.npmjs.org/detect-port/-/detect-port-1.3.0.tgz" - integrity sha512-E+B1gzkl2gqxt1IhUzwjrxBKRqx1UzC3WLONHinn8S3T6lwV/agVCyitiFOsGJ/eYuEUBvD71MZHy3Pv1G9doQ== +detect-port@^1.5.1: + version "1.6.1" + resolved "https://registry.yarnpkg.com/detect-port/-/detect-port-1.6.1.tgz#45e4073997c5f292b957cb678fb0bb8ed4250a67" + integrity sha512-CmnVc+Hek2egPx1PeTFVta2W78xy2K/9Rkf6cC4T59S50tVnzKj+tnx5mmx5lwvCkujZ4uRrpRSuV+IVs3f90Q== dependencies: address "^1.0.1" - debug "^2.6.0" + debug "4" detective@^5.2.0: version "5.2.1" @@ -4269,16 +4444,18 @@ detective@^5.2.0: defined "^1.0.0" minimist "^1.2.6" +devlop@^1.0.0, devlop@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/devlop/-/devlop-1.1.0.tgz#4db7c2ca4dc6e0e834c30be70c94bbc976dc7018" + integrity sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA== + dependencies: + dequal "^2.0.0" + didyoumean@^1.2.2: version "1.2.2" resolved "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz" integrity sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw== -diff@^5.0.0: - version "5.1.0" - resolved "https://registry.npmjs.org/diff/-/diff-5.1.0.tgz" - integrity sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw== - dir-glob@^3.0.1: version "3.0.1" resolved "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz" @@ -4308,21 +4485,21 @@ dns-packet@^5.2.2: dependencies: "@leichtgewicht/ip-codec" "^2.0.1" -docusaurus-lunr-search@^2.3.2: - version "2.3.2" - resolved "https://registry.npmjs.org/docusaurus-lunr-search/-/docusaurus-lunr-search-2.3.2.tgz" - integrity sha512-Ngvm2kXwliWThqAThXI1912rOKHlFL7BjIc+OVNUfzkjpk5ar4TFEh+EUaaMOLw4V0BBko3CW0Ym7prqqm3jLQ== +docusaurus-lunr-search@^3.5.0: + version "3.5.0" + resolved "https://registry.yarnpkg.com/docusaurus-lunr-search/-/docusaurus-lunr-search-3.5.0.tgz#52832d7bdd858ed093e98a743ee7bd4df596cfbf" + integrity sha512-k3zN4jYMi/prWInJILGKOxE+BVcgYinwj9+gcECsYm52tS+4ZKzXQzbPnVJAEXmvKOfFMcDFvS3MSmm6cEaxIQ== dependencies: autocomplete.js "^0.37.0" - classnames "^2.2.6" + clsx "^1.2.1" gauge "^3.0.0" hast-util-select "^4.0.0" hast-util-to-text "^2.0.0" hogan.js "^3.0.2" lunr "^2.3.8" lunr-languages "^1.4.0" + mark.js "^8.11.1" minimatch "^3.0.4" - object-assign "^4.1.1" rehype-parse "^7.0.1" to-vfile "^6.1.0" unified "^9.0.0" @@ -4406,13 +4583,6 @@ dot-case@^3.0.4: no-case "^3.0.4" tslib "^2.0.3" -dot-prop@^5.2.0: - version "5.3.0" - resolved "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz" - integrity sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q== - dependencies: - is-obj "^2.0.0" - dot-prop@^6.0.1: version "6.0.1" resolved 
"https://registry.yarnpkg.com/dot-prop/-/dot-prop-6.0.1.tgz#fc26b3cf142b9e59b74dbd39ed66ce620c681083" @@ -4420,11 +4590,6 @@ dot-prop@^6.0.1: dependencies: is-obj "^2.0.0" -duplexer3@^0.1.4: - version "0.1.5" - resolved "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.5.tgz" - integrity sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA== - duplexer@^0.1.2: version "0.1.2" resolved "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz" @@ -4445,10 +4610,10 @@ electron-to-chromium@^1.4.251: resolved "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz" integrity sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA== -electron-to-chromium@^1.5.4: - version "1.5.13" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.13.tgz#1abf0410c5344b2b829b7247e031f02810d442e6" - integrity sha512-lbBcvtIJ4J6sS4tb5TLp1b4LyfCdMkwStzXPyAgVgTRAsep4bvrAGaBOP7ZJtQMNJpSQ9SqG4brWOroNaQtm7Q== +electron-to-chromium@^1.5.41: + version "1.5.50" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.5.50.tgz#d9ba818da7b2b5ef1f3dd32bce7046feb7e93234" + integrity sha512-eMVObiUQ2LdgeO1F/ySTXsvqvxb6ZH2zPGaMYsWzRDdOddUa77tdmI0ltg+L16UpbWdhPmuF3wIQYyQq65WfZw== emoji-regex@^10.2.1: version "10.3.0" @@ -4465,15 +4630,20 @@ emoji-regex@^9.2.2: resolved "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz" integrity sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg== +emojilib@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/emojilib/-/emojilib-2.4.0.tgz#ac518a8bb0d5f76dda57289ccb2fdf9d39ae721e" + integrity sha512-5U0rVMU5Y2n2+ykNLQqMoqklN9ICBT/KsvC1Gz6vqHbz2AXXGkG+Pm5rMWk/8Vjrr/mY9985Hi8DYzn1F09Nyw== + emojis-list@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz" integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== -emoticon@^3.2.0: - version "3.2.0" - resolved "https://registry.npmjs.org/emoticon/-/emoticon-3.2.0.tgz" - integrity sha512-SNujglcLTTg+lDAcApPNgEdudaqQFiAbJCqzjNxJkvN9vAwCGi0uu8IUVvx+f16h+V44KCY6Y2yboroc9pilHg== +emoticon@^4.0.1: + version "4.1.0" + resolved "https://registry.yarnpkg.com/emoticon/-/emoticon-4.1.0.tgz#d5a156868ee173095627a33de3f1e914c3dde79e" + integrity sha512-VWZfnxqwNcc51hIy/sbOdEem6D+cVtpPzEEtVAFdaas30+1dgkyaOQ4sQ6Bp0tOMqWO1v+HQfYaoodOkdhK6SQ== encodeurl@~1.0.2: version "1.0.2" @@ -4505,16 +4675,16 @@ entities@^2.0.0: resolved "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz" integrity sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A== -entities@^3.0.1: - version "3.0.1" - resolved "https://registry.npmjs.org/entities/-/entities-3.0.1.tgz" - integrity sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q== - entities@^4.2.0, entities@^4.3.0: version "4.3.1" resolved "https://registry.npmjs.org/entities/-/entities-4.3.1.tgz" integrity sha512-o4q/dYJlmyjP2zfnaWDUC6A3BQFmVTX+tZPezK7k0GLSU9QYCauscf5Y+qcEPzKL+EixVouYDgLQK5H9GrLpkg== +entities@^4.4.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/entities/-/entities-4.5.0.tgz#5d268ea5e7113ec74c4d033b79ea5a35a488fb48" + integrity sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw== + env-ci@^9.1.1: version "9.1.1" resolved 
"https://registry.yarnpkg.com/env-ci/-/env-ci-9.1.1.tgz#f081684c64a639c6ff5cb801bd70464bd40498a4" @@ -4525,7 +4695,7 @@ env-ci@^9.1.1: error-ex@^1.3.1: version "1.3.2" - resolved "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz" + resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== dependencies: is-arrayish "^0.2.1" @@ -4547,20 +4717,35 @@ es-module-lexer@^1.2.1: resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.5.4.tgz#a8efec3a3da991e60efa6b633a7cad6ab8d26b78" integrity sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw== +esast-util-from-estree@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/esast-util-from-estree/-/esast-util-from-estree-2.0.0.tgz#8d1cfb51ad534d2f159dc250e604f3478a79f1ad" + integrity sha512-4CyanoAudUSBAn5K13H4JhsMH6L9ZP7XbLVe/dKybkxMO7eDyLsT8UHl9TRNrU2Gr9nz+FovfSIjuXWJ81uVwQ== + dependencies: + "@types/estree-jsx" "^1.0.0" + devlop "^1.0.0" + estree-util-visit "^2.0.0" + unist-util-position-from-estree "^2.0.0" + +esast-util-from-js@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/esast-util-from-js/-/esast-util-from-js-2.0.1.tgz#5147bec34cc9da44accf52f87f239a40ac3e8225" + integrity sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw== + dependencies: + "@types/estree-jsx" "^1.0.0" + acorn "^8.0.0" + esast-util-from-estree "^2.0.0" + vfile-message "^4.0.0" + escalade@^3.1.1: version "3.1.1" resolved "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== -escalade@^3.1.2: - version "3.1.2" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.2.tgz#54076e9ab29ea5bf3d8f1ed62acffbb88272df27" - integrity sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA== - -escape-goat@^2.0.0: - version "2.1.1" - resolved "https://registry.npmjs.org/escape-goat/-/escape-goat-2.1.1.tgz" - integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q== +escalade@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.2.0.tgz#011a3f69856ba189dffa7dc8fcce99d2a87903e5" + integrity sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA== escape-goat@^4.0.0: version "4.0.0" @@ -4574,14 +4759,19 @@ escape-html@^1.0.3, escape-html@~1.0.3: escape-string-regexp@^1.0.5: version "1.0.5" - resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg== escape-string-regexp@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== +escape-string-regexp@^5.0.0: + version "5.0.0" + resolved 
"https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz#4683126b500b61762f2dbebace1806e8be31b1c8" + integrity sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw== + eslint-scope@5.1.1: version "5.1.1" resolved "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz" @@ -4612,43 +4802,59 @@ estraverse@^5.2.0: resolved "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz" integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== -estree-util-attach-comments@^2.0.0: - version "2.1.1" - resolved "https://registry.npmjs.org/estree-util-attach-comments/-/estree-util-attach-comments-2.1.1.tgz" - integrity sha512-+5Ba/xGGS6mnwFbXIuQiDPTbuTxuMCooq3arVv7gPZtYpjp+VXH/NkHAP35OOefPhNG/UGqU3vt/LTABwcHX0w== +estree-util-attach-comments@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/estree-util-attach-comments/-/estree-util-attach-comments-3.0.0.tgz#344bde6a64c8a31d15231e5ee9e297566a691c2d" + integrity sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw== dependencies: "@types/estree" "^1.0.0" -estree-util-build-jsx@^2.0.0: - version "2.2.2" - resolved "https://registry.npmjs.org/estree-util-build-jsx/-/estree-util-build-jsx-2.2.2.tgz" - integrity sha512-m56vOXcOBuaF+Igpb9OPAy7f9w9OIkb5yhjsZuaPm7HoGi4oTOQi0h2+yZ+AtKklYFZ+rPC4n0wYCJCEU1ONqg== +estree-util-build-jsx@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/estree-util-build-jsx/-/estree-util-build-jsx-3.0.1.tgz#b6d0bced1dcc4f06f25cf0ceda2b2dcaf98168f1" + integrity sha512-8U5eiL6BTrPxp/CHbs2yMgP8ftMhR5ww1eIKoWRMlqvltHF8fZn5LRDvTKuxD3DUn+shRbLGqXemcP51oFCsGQ== dependencies: "@types/estree-jsx" "^1.0.0" - estree-util-is-identifier-name "^2.0.0" + devlop "^1.0.0" + estree-util-is-identifier-name "^3.0.0" estree-walker "^3.0.0" -estree-util-is-identifier-name@^2.0.0: - version "2.1.0" - resolved "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-2.1.0.tgz" - integrity sha512-bEN9VHRyXAUOjkKVQVvArFym08BTWB0aJPppZZr0UNyAqWsLaVfAqP7hbaTJjzHifmB5ebnR8Wm7r7yGN/HonQ== +estree-util-is-identifier-name@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz#0b5ef4c4ff13508b34dcd01ecfa945f61fce5dbd" + integrity sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg== -estree-util-to-js@^1.1.0: - version "1.2.0" - resolved "https://registry.npmjs.org/estree-util-to-js/-/estree-util-to-js-1.2.0.tgz" - integrity sha512-IzU74r1PK5IMMGZXUVZbmiu4A1uhiPgW5hm1GjcOfr4ZzHaMPpLNJjR7HjXiIOzi25nZDrgFTobHTkV5Q6ITjA== +estree-util-scope@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/estree-util-scope/-/estree-util-scope-1.0.0.tgz#9cbdfc77f5cb51e3d9ed4ad9c4adbff22d43e585" + integrity sha512-2CAASclonf+JFWBNJPndcOpA8EMJwa0Q8LUFJEKqXLW6+qBvbFZuF5gItbQOs/umBUkjviCSDCbBwU2cXbmrhQ== + dependencies: + "@types/estree" "^1.0.0" + devlop "^1.0.0" + +estree-util-to-js@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/estree-util-to-js/-/estree-util-to-js-2.0.0.tgz#10a6fb924814e6abb62becf0d2bc4dea51d04f17" + integrity sha512-WDF+xj5rRWmD5tj6bIqRi6CkLIXbbNQUcxQHzGysQzvHmdYG2G7p/Tf0J0gpxGgkeMZNTIjT/AoSvC9Xehcgdg== dependencies: "@types/estree-jsx" "^1.0.0" astring "^1.8.0" source-map "^0.7.0" -estree-util-visit@^1.0.0: - version "1.2.1" - resolved 
"https://registry.npmjs.org/estree-util-visit/-/estree-util-visit-1.2.1.tgz" - integrity sha512-xbgqcrkIVbIG+lI/gzbvd9SGTJL4zqJKBFttUl5pP27KhAjtMKbX/mQXJ7qgyXpMgVy/zvpm0xoQQaGL8OloOw== +estree-util-value-to-estree@^3.0.1: + version "3.2.1" + resolved "https://registry.yarnpkg.com/estree-util-value-to-estree/-/estree-util-value-to-estree-3.2.1.tgz#f8083e56f51efb4889794490730c036ba6167ee6" + integrity sha512-Vt2UOjyPbNQQgT5eJh+K5aATti0OjCIAGc9SgMdOFYbohuifsWclR74l0iZTJwePMgWYdX1hlVS+dedH9XV8kw== + dependencies: + "@types/estree" "^1.0.0" + +estree-util-visit@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/estree-util-visit/-/estree-util-visit-2.0.0.tgz#13a9a9f40ff50ed0c022f831ddf4b58d05446feb" + integrity sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww== dependencies: "@types/estree-jsx" "^1.0.0" - "@types/unist" "^2.0.0" + "@types/unist" "^3.0.0" estree-walker@^3.0.0: version "3.0.3" @@ -4662,10 +4868,10 @@ esutils@^2.0.2: resolved "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== -eta@^2.0.0: - version "2.0.1" - resolved "https://registry.npmjs.org/eta/-/eta-2.0.1.tgz" - integrity sha512-46E2qDPDm7QA+usjffUWz9KfXsxVZclPOuKsXs4ZWZdI/X1wpDF7AO424pt7fdYohCzWsIkXAhNGXSlwo5naAg== +eta@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/eta/-/eta-2.2.0.tgz#eb8b5f8c4e8b6306561a455e62cd7492fe3a9b8a" + integrity sha512-UVQ72Rqjy/ZKQalzV5dCCJP80GrmPrMxh6NlNf+erV6ObL0ZFkhCstWRawS85z3smdr3d2wXPsZEY7rDPfGd2g== etag@~1.8.1: version "1.8.1" @@ -4811,12 +5017,10 @@ fast-json-stable-stringify@^2.0.0: resolved "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== -fast-url-parser@1.1.3: - version "1.1.3" - resolved "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz" - integrity sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ== - dependencies: - punycode "^1.3.2" +fast-uri@^3.0.1: + version "3.0.3" + resolved "https://registry.yarnpkg.com/fast-uri/-/fast-uri-3.0.3.tgz#892a1c91802d5d7860de728f18608a0573142241" + integrity sha512-aLrHthzCjH5He4Z2H9YZ+v6Ujb9ocRuW6ZzkJQOrTxleEijANq4v1TsaPaVG1PZcuurEzrLcWRyYBYXD5cEiaw== fastq@^1.6.0: version "1.13.0" @@ -4825,6 +5029,13 @@ fastq@^1.6.0: dependencies: reusify "^1.0.4" +fault@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/fault/-/fault-2.0.1.tgz#d47ca9f37ca26e4bd38374a7c500b5a384755b6c" + integrity sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ== + dependencies: + format "^0.2.0" + faye-websocket@^0.11.3: version "0.11.4" resolved "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz" @@ -4832,31 +5043,6 @@ faye-websocket@^0.11.3: dependencies: websocket-driver ">=0.5.1" -fbemitter@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/fbemitter/-/fbemitter-3.0.0.tgz" - integrity sha512-KWKaceCwKQU0+HPoop6gn4eOHk50bBv/VxjJtGMfwmJt3D29JpN4H4eisCtIPA+a8GVBam+ldMMpMjJUvpDyHw== - dependencies: - fbjs "^3.0.0" - -fbjs-css-vars@^1.0.0: - version "1.0.2" - resolved "https://registry.npmjs.org/fbjs-css-vars/-/fbjs-css-vars-1.0.2.tgz" - integrity sha512-b2XGFAFdWZWg0phtAWLHCk836A1Xann+I+Dgd3Gk64MHKZO44FfoD1KxyvbSh0qZsIoXQGGlVztIY+oitJPpRQ== - -fbjs@^3.0.0, fbjs@^3.0.1: - version 
"3.0.4" - resolved "https://registry.npmjs.org/fbjs/-/fbjs-3.0.4.tgz" - integrity sha512-ucV0tDODnGV3JCnnkmoszb5lf4bNpzjv80K41wd4k798Etq+UYD0y0TIfalLjZoKgjive6/adkRnszwapiDgBQ== - dependencies: - cross-fetch "^3.1.5" - fbjs-css-vars "^1.0.0" - loose-envify "^1.0.0" - object-assign "^4.1.0" - promise "^7.1.1" - setimmediate "^1.0.5" - ua-parser-js "^0.7.30" - feed@^4.2.2: version "4.2.2" resolved "https://registry.npmjs.org/feed/-/feed-4.2.2.tgz" @@ -4864,6 +5050,13 @@ feed@^4.2.2: dependencies: xml-js "^1.6.11" +figures@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" + integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== + dependencies: + escape-string-regexp "^1.0.5" + file-loader@^6.2.0: version "6.2.0" resolved "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz" @@ -4904,18 +5097,17 @@ finalhandler@1.3.1: statuses "2.0.1" unpipe "~1.0.0" -find-cache-dir@^3.3.1: - version "3.3.2" - resolved "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz" - integrity sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig== +find-cache-dir@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-4.0.0.tgz#a30ee0448f81a3990708f6453633c733e2f6eec2" + integrity sha512-9ZonPT4ZAK4a+1pUPVPZJapbi7O5qbbJPdYw/NOQWZZbVLdDTYM3A4R9z/DpAM08IDaFGsvPgiGZ82WEwUDWjg== dependencies: - commondir "^1.0.1" - make-dir "^3.0.2" - pkg-dir "^4.1.0" + common-path-prefix "^3.0.0" + pkg-dir "^7.0.0" find-root@^1.1.0: version "1.1.0" - resolved "https://registry.npmjs.org/find-root/-/find-root-1.1.0.tgz" + resolved "https://registry.yarnpkg.com/find-root/-/find-root-1.1.0.tgz#abcfc8ba76f708c42a97b3d685b7e9450bfb9ce4" integrity sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng== find-up@^3.0.0: @@ -4925,14 +5117,6 @@ find-up@^3.0.0: dependencies: locate-path "^3.0.0" -find-up@^4.0.0: - version "4.1.0" - resolved "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - find-up@^5.0.0: version "5.0.0" resolved "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz" @@ -4941,15 +5125,20 @@ find-up@^5.0.0: locate-path "^6.0.0" path-exists "^4.0.0" -flux@^4.0.1: - version "4.0.3" - resolved "https://registry.npmjs.org/flux/-/flux-4.0.3.tgz" - integrity sha512-yKAbrp7JhZhj6uiT1FTuVMlIAT1J4jqEyBpFApi1kxpGZCvacMVc/t1pMQyotqHhAgvoE3bNvAykhCo2CLjnYw== +find-up@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/find-up/-/find-up-6.3.0.tgz#2abab3d3280b2dc7ac10199ef324c4e002c8c790" + integrity sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw== dependencies: - fbemitter "^3.0.0" - fbjs "^3.0.1" + locate-path "^7.1.0" + path-exists "^5.0.0" + +flat@^5.0.2: + version "5.0.2" + resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241" + integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ== -follow-redirects@^1.0.0, follow-redirects@^1.14.7: +follow-redirects@^1.0.0: version "1.15.1" resolved "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.1.tgz" integrity 
sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA== @@ -4992,15 +5181,20 @@ form-data@^4.0.0: combined-stream "^1.0.8" mime-types "^2.1.12" +format@^0.2.0: + version "0.2.2" + resolved "https://registry.yarnpkg.com/format/-/format-0.2.2.tgz#d6170107e9efdc4ed30c9dc39016df942b5cb58b" + integrity sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww== + forwarded@0.2.0: version "0.2.0" resolved "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz" integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow== -fraction.js@^4.2.0: - version "4.2.0" - resolved "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz" - integrity sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA== +fraction.js@^4.3.7: + version "4.3.7" + resolved "https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.3.7.tgz#06ca0085157e42fda7f9e726e79fefc4068840f7" + integrity sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew== fresh@0.5.2: version "0.5.2" @@ -5012,7 +5206,7 @@ fs-constants@^1.0.0: resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== -fs-extra@^10.0.0, fs-extra@^10.1.0: +fs-extra@^10.0.0: version "10.1.0" resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-10.1.0.tgz" integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== @@ -5021,6 +5215,15 @@ fs-extra@^10.0.0, fs-extra@^10.1.0: jsonfile "^6.0.1" universalify "^2.0.0" +fs-extra@^11.1.1, fs-extra@^11.2.0: + version "11.2.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-11.2.0.tgz#e70e17dfad64232287d01929399e0ea7c86b0e5b" + integrity sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + fs-extra@^9.0.0: version "9.1.0" resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz" @@ -5046,12 +5249,7 @@ fsevents@2.3.2, fsevents@~2.3.2: resolved "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz" integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== -function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== - -function-bind@^1.1.2: +function-bind@^1.1.1, function-bind@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c" integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA== @@ -5071,7 +5269,7 @@ gauge@^3.0.0: strip-ansi "^6.0.1" wide-align "^1.1.2" -gensync@^1.0.0-beta.1, gensync@^1.0.0-beta.2: +gensync@^1.0.0-beta.2: version "1.0.0-beta.2" resolved "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz" integrity sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg== @@ -5101,20 +5299,6 @@ get-own-enumerable-property-symbols@^3.0.0: resolved "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz" 
integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g== -get-stream@^4.1.0: - version "4.1.0" - resolved "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - -get-stream@^5.1.0: - version "5.2.0" - resolved "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz" - integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== - dependencies: - pump "^3.0.0" - get-stream@^6.0.0, get-stream@^6.0.1: version "6.0.1" resolved "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz" @@ -5125,10 +5309,10 @@ github-from-package@0.0.0: resolved "https://registry.yarnpkg.com/github-from-package/-/github-from-package-0.0.0.tgz#97fb5d96bfde8973313f20e8288ef9a167fa64ce" integrity sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw== -github-slugger@^1.4.0: - version "1.4.0" - resolved "https://registry.npmjs.org/github-slugger/-/github-slugger-1.4.0.tgz" - integrity sha512-w0dzqw/nt51xMVmlaV1+JRzN+oCa1KfcgGEWhxUG16wbdA+Xnt/yoFO8Z8x/V82ZcZ0wy6ln9QDup5avbhiDhQ== +github-slugger@^1.5.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/github-slugger/-/github-slugger-1.5.0.tgz#17891bbc73232051474d68bd867a34625c955f7d" + integrity sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw== glob-parent@^5.1.2, glob-parent@~5.1.2: version "5.1.2" @@ -5236,29 +5420,12 @@ got@^12.1.0: p-cancelable "^3.0.0" responselike "^3.0.0" -got@^9.6.0: - version "9.6.0" - resolved "https://registry.npmjs.org/got/-/got-9.6.0.tgz" - integrity sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q== - dependencies: - "@sindresorhus/is" "^0.14.0" - "@szmarczak/http-timer" "^1.1.2" - cacheable-request "^6.0.0" - decompress-response "^3.3.0" - duplexer3 "^0.1.4" - get-stream "^4.1.0" - lowercase-keys "^1.0.1" - mimic-response "^1.0.1" - p-cancelable "^1.0.0" - to-readable-stream "^1.0.0" - url-parse-lax "^3.0.0" - graceful-fs@4.2.10, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.4, graceful-fs@^4.2.6: version "4.2.10" resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz" integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== -graceful-fs@^4.2.11: +graceful-fs@^4.2.11, graceful-fs@^4.2.9: version "4.2.11" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3" integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ== @@ -5287,7 +5454,7 @@ handle-thing@^2.0.0: has-flag@^3.0.0: version "3.0.0" - resolved "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz" + resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" integrity sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw== has-flag@^4.0.0: @@ -5324,43 +5491,23 @@ has-unicode@^2.0.1: resolved "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz" integrity sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ== -has-yarn@^2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz" - integrity 
sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw== - has-yarn@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/has-yarn/-/has-yarn-3.0.0.tgz#c3c21e559730d1d3b57e28af1f30d06fac38147d" integrity sha512-IrsVwUHhEULx3R8f/aA8AHuEzAorplsab/v8HBzEiIukwq5i/EC+xmOW+HfP1OaDP+2JkgT1yILHN2O3UFIbcA== has@^1.0.3: - version "1.0.3" - resolved "https://registry.npmjs.org/has/-/has-1.0.3.tgz" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== - dependencies: - function-bind "^1.1.1" + version "1.0.4" + resolved "https://registry.yarnpkg.com/has/-/has-1.0.4.tgz#2eb2860e000011dae4f1406a86fe80e530fb2ec6" + integrity sha512-qdSAmqLF6209RFj4VVItywPMbm3vWylknmB3nvNiUIs72xAimcM8nVYxYr7ncvZq5qzk9MKIZR8ijqD/1QuYjQ== -hasown@^2.0.0: +hasown@^2.0.0, hasown@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003" integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ== dependencies: function-bind "^1.1.2" -hast-to-hyperscript@^9.0.0: - version "9.0.1" - resolved "https://registry.npmjs.org/hast-to-hyperscript/-/hast-to-hyperscript-9.0.1.tgz" - integrity sha512-zQgLKqF+O2F72S1aa4y2ivxzSlko3MAvxkwG8ehGmNiqd98BIN3JM1rAJPmplEyLmGLO2QZYJtIneOSZ2YbJuA== - dependencies: - "@types/unist" "^2.0.3" - comma-separated-tokens "^1.0.0" - property-information "^5.3.0" - space-separated-tokens "^1.0.0" - style-to-object "^0.3.0" - unist-util-is "^4.0.0" - web-namespaces "^1.0.0" - hast-util-from-parse5@^6.0.0: version "6.0.1" resolved "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-6.0.1.tgz" @@ -5373,6 +5520,20 @@ hast-util-from-parse5@^6.0.0: vfile-location "^3.2.0" web-namespaces "^1.0.0" +hast-util-from-parse5@^8.0.0: + version "8.0.1" + resolved "https://registry.yarnpkg.com/hast-util-from-parse5/-/hast-util-from-parse5-8.0.1.tgz#654a5676a41211e14ee80d1b1758c399a0327651" + integrity sha512-Er/Iixbc7IEa7r/XLtuG52zoqn/b3Xng/w6aZQ0xGVxzhw5xUFxcRqdPzP6yFi/4HBYRaifaI5fQ1RH8n0ZeOQ== + dependencies: + "@types/hast" "^3.0.0" + "@types/unist" "^3.0.0" + devlop "^1.0.0" + hastscript "^8.0.0" + property-information "^6.0.0" + vfile "^6.0.0" + vfile-location "^5.0.0" + web-namespaces "^2.0.0" + hast-util-has-property@^1.0.0: version "1.0.4" resolved "https://registry.npmjs.org/hast-util-has-property/-/hast-util-has-property-1.0.4.tgz" @@ -5388,21 +5549,31 @@ hast-util-parse-selector@^2.0.0: resolved "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-2.2.5.tgz" integrity sha512-7j6mrk/qqkSehsM92wQjdIgWM2/BW61u/53G6xmC8i1OmEdKLHbk419QKQUjz6LglWsfqoiHmyMRkP1BGjecNQ== -hast-util-raw@6.0.1: - version "6.0.1" - resolved "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-6.0.1.tgz" - integrity sha512-ZMuiYA+UF7BXBtsTBNcLBF5HzXzkyE6MLzJnL605LKE8GJylNjGc4jjxazAHUtcwT5/CEt6afRKViYB4X66dig== - dependencies: - "@types/hast" "^2.0.0" - hast-util-from-parse5 "^6.0.0" - hast-util-to-parse5 "^6.0.0" - html-void-elements "^1.0.0" - parse5 "^6.0.0" - unist-util-position "^3.0.0" - vfile "^4.0.0" - web-namespaces "^1.0.0" - xtend "^4.0.0" - zwitch "^1.0.0" +hast-util-parse-selector@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz#352879fa86e25616036037dd8931fb5f34cb4a27" + integrity sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A== + 
dependencies: + "@types/hast" "^3.0.0" + +hast-util-raw@^9.0.0: + version "9.0.4" + resolved "https://registry.yarnpkg.com/hast-util-raw/-/hast-util-raw-9.0.4.tgz#2da03e37c46eb1a6f1391f02f9b84ae65818f7ed" + integrity sha512-LHE65TD2YiNsHD3YuXcKPHXPLuYh/gjp12mOfU8jxSrm1f/yJpsb0F/KKljS6U9LJoP0Ux+tCe8iJ2AsPzTdgA== + dependencies: + "@types/hast" "^3.0.0" + "@types/unist" "^3.0.0" + "@ungap/structured-clone" "^1.0.0" + hast-util-from-parse5 "^8.0.0" + hast-util-to-parse5 "^8.0.0" + html-void-elements "^3.0.0" + mdast-util-to-hast "^13.0.0" + parse5 "^7.0.0" + unist-util-position "^5.0.0" + unist-util-visit "^5.0.0" + vfile "^6.0.0" + web-namespaces "^2.0.0" + zwitch "^2.0.0" hast-util-select@^4.0.0: version "4.0.2" @@ -5424,37 +5595,61 @@ hast-util-select@^4.0.0: unist-util-visit "^2.0.0" zwitch "^1.0.0" -hast-util-to-estree@^2.0.0: - version "2.3.2" - resolved "https://registry.npmjs.org/hast-util-to-estree/-/hast-util-to-estree-2.3.2.tgz" - integrity sha512-YYDwATNdnvZi3Qi84iatPIl1lWpXba1MeNrNbDfJfVzEBZL8uUmtR7mt7bxKBC8kuAuvb0bkojXYZzsNHyHCLg== +hast-util-to-estree@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/hast-util-to-estree/-/hast-util-to-estree-3.1.0.tgz#f2afe5e869ddf0cf690c75f9fc699f3180b51b19" + integrity sha512-lfX5g6hqVh9kjS/B9E2gSkvHH4SZNiQFiqWS0x9fENzEl+8W12RqdRxX6d/Cwxi30tPQs3bIO+aolQJNp1bIyw== dependencies: "@types/estree" "^1.0.0" "@types/estree-jsx" "^1.0.0" - "@types/hast" "^2.0.0" - "@types/unist" "^2.0.0" + "@types/hast" "^3.0.0" comma-separated-tokens "^2.0.0" - estree-util-attach-comments "^2.0.0" - estree-util-is-identifier-name "^2.0.0" - hast-util-whitespace "^2.0.0" - mdast-util-mdx-expression "^1.0.0" - mdast-util-mdxjs-esm "^1.0.0" + devlop "^1.0.0" + estree-util-attach-comments "^3.0.0" + estree-util-is-identifier-name "^3.0.0" + hast-util-whitespace "^3.0.0" + mdast-util-mdx-expression "^2.0.0" + mdast-util-mdx-jsx "^3.0.0" + mdast-util-mdxjs-esm "^2.0.0" property-information "^6.0.0" space-separated-tokens "^2.0.0" - style-to-object "^0.4.1" - unist-util-position "^4.0.0" + style-to-object "^0.4.0" + unist-util-position "^5.0.0" zwitch "^2.0.0" -hast-util-to-parse5@^6.0.0: - version "6.0.0" - resolved "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-6.0.0.tgz" - integrity sha512-Lu5m6Lgm/fWuz8eWnrKezHtVY83JeRGaNQ2kn9aJgqaxvVkFCZQBEhgodZUDUvoodgyROHDb3r5IxAEdl6suJQ== +hast-util-to-jsx-runtime@^2.0.0: + version "2.3.2" + resolved "https://registry.yarnpkg.com/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.2.tgz#6d11b027473e69adeaa00ca4cfb5bb68e3d282fa" + integrity sha512-1ngXYb+V9UT5h+PxNRa1O1FYguZK/XL+gkeqvp7EdHlB9oHUG0eYRo/vY5inBdcqo3RkPMC58/H94HvkbfGdyg== dependencies: - hast-to-hyperscript "^9.0.0" - property-information "^5.0.0" - web-namespaces "^1.0.0" - xtend "^4.0.0" - zwitch "^1.0.0" + "@types/estree" "^1.0.0" + "@types/hast" "^3.0.0" + "@types/unist" "^3.0.0" + comma-separated-tokens "^2.0.0" + devlop "^1.0.0" + estree-util-is-identifier-name "^3.0.0" + hast-util-whitespace "^3.0.0" + mdast-util-mdx-expression "^2.0.0" + mdast-util-mdx-jsx "^3.0.0" + mdast-util-mdxjs-esm "^2.0.0" + property-information "^6.0.0" + space-separated-tokens "^2.0.0" + style-to-object "^1.0.0" + unist-util-position "^5.0.0" + vfile-message "^4.0.0" + +hast-util-to-parse5@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/hast-util-to-parse5/-/hast-util-to-parse5-8.0.0.tgz#477cd42d278d4f036bc2ea58586130f6f39ee6ed" + integrity 
sha512-3KKrV5ZVI8if87DVSi1vDeByYrkGzg4mEfeu4alwgmmIeARiBLKCZS2uw5Gb6nU9x9Yufyj3iudm6i7nl52PFw== + dependencies: + "@types/hast" "^3.0.0" + comma-separated-tokens "^2.0.0" + devlop "^1.0.0" + property-information "^6.0.0" + space-separated-tokens "^2.0.0" + web-namespaces "^2.0.0" + zwitch "^2.0.0" hast-util-to-string@^1.0.0: version "1.0.4" @@ -5475,10 +5670,12 @@ hast-util-whitespace@^1.0.0: resolved "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-1.0.4.tgz" integrity sha512-I5GTdSfhYfAPNztx2xJRQpG8cuDSNt599/7YUn7Gx/WxNMsG+a835k97TDkFgk123cwjfwINaZknkKkphx/f2A== -hast-util-whitespace@^2.0.0: - version "2.0.1" - resolved "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.1.tgz" - integrity sha512-nAxA0v8+vXSBDt3AnRUNjyRIQ0rD+ntpbAp4LnPkumc5M9yUbSMa4XDU9Q6etY4f1Wp4bNgvc1yjiZtsTTrSng== +hast-util-whitespace@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz#7778ed9d3c92dd9e8c5c8f648a49c21fc51cb621" + integrity sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw== + dependencies: + "@types/hast" "^3.0.0" hastscript@^6.0.0: version "6.0.0" @@ -5491,6 +5688,17 @@ hastscript@^6.0.0: property-information "^5.0.0" space-separated-tokens "^1.0.0" +hastscript@^8.0.0: + version "8.0.0" + resolved "https://registry.yarnpkg.com/hastscript/-/hastscript-8.0.0.tgz#4ef795ec8dee867101b9f23cc830d4baf4fd781a" + integrity sha512-dMOtzCEd3ABUeSIISmrETiKuyydk1w0pa+gE/uormcTpSYuaNJPbX1NU3JLyscSLjwAQM8bWMhhIlnCqnRvDTw== + dependencies: + "@types/hast" "^3.0.0" + comma-separated-tokens "^2.0.0" + hast-util-parse-selector "^4.0.0" + property-information "^6.0.0" + space-separated-tokens "^2.0.0" + he@^1.2.0: version "1.2.0" resolved "https://registry.npmjs.org/he/-/he-1.2.0.tgz" @@ -5553,7 +5761,12 @@ html-entities@^2.3.2: resolved "https://registry.npmjs.org/html-entities/-/html-entities-2.3.3.tgz" integrity sha512-DV5Ln36z34NNTDgnz0EWGBLZENelNAtkiFA4kyNOG2tDI6Mz1uSWiq1wAKdyjnJwyDiDO7Fa2SO1CTxPXL8VxA== -html-minifier-terser@^6.0.2, html-minifier-terser@^6.1.0: +html-escaper@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" + integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== + +html-minifier-terser@^6.0.2: version "6.1.0" resolved "https://registry.npmjs.org/html-minifier-terser/-/html-minifier-terser-6.1.0.tgz" integrity sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw== @@ -5566,20 +5779,43 @@ html-minifier-terser@^6.0.2, html-minifier-terser@^6.1.0: relateurl "^0.2.7" terser "^5.10.0" -html-tags@^3.1.0, html-tags@^3.2.0: +html-minifier-terser@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/html-minifier-terser/-/html-minifier-terser-7.2.0.tgz#18752e23a2f0ed4b0f550f217bb41693e975b942" + integrity sha512-tXgn3QfqPIpGl9o+K5tpcj3/MN4SfLtsx2GWwBC3SSd0tXQGyF3gsSqad8loJgKZGM3ZxbYDd5yhiBIdWpmvLA== + dependencies: + camel-case "^4.1.2" + clean-css "~5.3.2" + commander "^10.0.0" + entities "^4.4.0" + param-case "^3.0.4" + relateurl "^0.2.7" + terser "^5.15.1" + +html-tags@^3.1.0: version "3.2.0" resolved "https://registry.npmjs.org/html-tags/-/html-tags-3.2.0.tgz" integrity sha512-vy7ClnArOZwCnqZgvv+ddgHgJiAFXe3Ge9ML5/mBctVJoUoYPCdxVucOywjDARn6CVoh3dRSFdPHy2sX80L0Wg== -html-void-elements@^1.0.0: - version "1.0.5" - resolved 
"https://registry.npmjs.org/html-void-elements/-/html-void-elements-1.0.5.tgz" - integrity sha512-uE/TxKuyNIcx44cIWnjr/rfIATDH7ZaOMmstu0CwhFG1Dunhlp4OC6/NMbhiwoq5BpW0ubi303qnEk/PZj614w== +html-tags@^3.3.1: + version "3.3.1" + resolved "https://registry.yarnpkg.com/html-tags/-/html-tags-3.3.1.tgz#a04026a18c882e4bba8a01a3d39cfe465d40b5ce" + integrity sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ== -html-webpack-plugin@^5.5.0: - version "5.5.0" - resolved "https://registry.npmjs.org/html-webpack-plugin/-/html-webpack-plugin-5.5.0.tgz" - integrity sha512-sy88PC2cRTVxvETRgUHFrL4No3UxvcH8G1NepGhqaTT+GXN2kTamqasot0inS5hXeg1cMbFDt27zzo9p35lZVw== +html-url-attributes@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/html-url-attributes/-/html-url-attributes-3.0.1.tgz#83b052cd5e437071b756cd74ae70f708870c2d87" + integrity sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ== + +html-void-elements@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/html-void-elements/-/html-void-elements-3.0.0.tgz#fc9dbd84af9e747249034d4d62602def6517f1d7" + integrity sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg== + +html-webpack-plugin@^5.6.0: + version "5.6.3" + resolved "https://registry.yarnpkg.com/html-webpack-plugin/-/html-webpack-plugin-5.6.3.tgz#a31145f0fee4184d53a794f9513147df1e653685" + integrity sha512-QSf1yjtSAsmf7rYBV7XX86uua4W/vkhIt0xNXKbsi2foEeW7vjJQz4bhnpL3xH+l1ryl1680uNv968Z+X6jSYg== dependencies: "@types/html-minifier-terser" "^6.0.0" html-minifier-terser "^6.0.2" @@ -5607,11 +5843,6 @@ htmlparser2@^8.0.1: domutils "^3.0.1" entities "^4.3.0" -http-cache-semantics@^4.0.0: - version "4.1.0" - resolved "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz" - integrity sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ== - http-cache-semantics@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a" @@ -5708,10 +5939,10 @@ ignore@^5.2.0: resolved "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz" integrity sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ== -image-size@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/image-size/-/image-size-1.0.1.tgz" - integrity sha512-VAwkvNSNGClRw9mDHhc5Efax8PLlsOGcUTh0T/LIriC8vPA3U5PdqXWqkz406MoYHMKW8Uf9gWr05T/rYB44kQ== +image-size@^1.0.2: + version "1.1.1" + resolved "https://registry.yarnpkg.com/image-size/-/image-size-1.1.1.tgz#ddd67d4dc340e52ac29ce5f546a09f4e29e840ac" + integrity sha512-541xKlUw6jr/6gGuk92F+mYM5zaFAc5ahphvkqvNe2bQ6gVBkd6bfrmVJ2t4KDAfikAYZyIqTnktX3i6/aQDrQ== dependencies: queue "6.0.2" @@ -5733,11 +5964,6 @@ import-fresh@^3.1.0, import-fresh@^3.2.1, import-fresh@^3.3.0: parent-module "^1.0.0" resolve-from "^4.0.0" -import-lazy@^2.1.0: - version "2.1.0" - resolved "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz" - integrity sha512-m7ZEHgtw69qOGw+jwxXkHlrlIPdTGkyh66zXZ1ajZbxkDBNjSY/LGbmjc7h0s2ELsUDTAhFr55TrPSSqJGPG0A== - import-lazy@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-4.0.0.tgz#e8eb627483a0a43da3c03f3e35548be5cb0cc153" @@ -5753,10 +5979,10 @@ indent-string@^4.0.0: resolved "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz" integrity 
sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg== -infima@0.2.0-alpha.43: - version "0.2.0-alpha.43" - resolved "https://registry.npmjs.org/infima/-/infima-0.2.0-alpha.43.tgz" - integrity sha512-2uw57LvUqW0rK/SWYnd/2rRfxNA5DDNOh33jxF7fy46VWoNhGxiUQyVZHbBMjQ33mQem0cjdDVwgWVAmlRfgyQ== +infima@0.2.0-alpha.45: + version "0.2.0-alpha.45" + resolved "https://registry.yarnpkg.com/infima/-/infima-0.2.0-alpha.45.tgz#542aab5a249274d81679631b492973dd2c1e7466" + integrity sha512-uyH0zfr1erU1OohLk0fT4Rrb94AOhguWNOcD9uGrSpRvNB+6gZXUoJX5J0NtvzBO10YZ9PgvA4NFgt+fYg8ojw== inflight@^1.0.4: version "1.0.6" @@ -5766,7 +5992,7 @@ inflight@^1.0.4: once "^1.3.0" wrappy "1" -inherits@2, inherits@2.0.4, inherits@^2.0.0, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: +inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3: version "2.0.4" resolved "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== @@ -5791,6 +6017,11 @@ inline-style-parser@0.1.1: resolved "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz" integrity sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q== +inline-style-parser@0.2.4: + version "0.2.4" + resolved "https://registry.yarnpkg.com/inline-style-parser/-/inline-style-parser-0.2.4.tgz#f4af5fe72e612839fcd453d989a586566d695f22" + integrity sha512-0aO8FkhNZlj/ZIbNi7Lxxr12obT7cL1moPfE4tg1LkX7LlLfC6DeX4l2ZEud1ukP9jNQyNnfzQVqwbwmAATY4Q== + interpret@^1.0.0: version "1.4.0" resolved "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz" @@ -5813,24 +6044,11 @@ ipaddr.js@^2.0.1: resolved "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.0.1.tgz" integrity sha512-1qTgH9NG+IIJ4yfKs2e6Pp1bZg8wbDbKHT21HrLIeYBTRLgMYKnMTPAuI3Lcs61nfx5h1xlXnbJtH1kX5/d/ng== -is-alphabetical@1.0.4, is-alphabetical@^1.0.0: - version "1.0.4" - resolved "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-1.0.4.tgz" - integrity sha512-DwzsA04LQ10FHTZuL0/grVDk4rFoVH1pjAToYwBrHSxcrBIGQuXrQMtD5U1b0U2XVgKZCTLLP8u2Qxqhy3l2Vg== - is-alphabetical@^2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz" integrity sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ== -is-alphanumerical@^1.0.0: - version "1.0.4" - resolved "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-1.0.4.tgz" - integrity sha512-UzoZUr+XfVz3t3v4KyGEniVL9BDRoQtY7tOyrRybkVNjDFWyo1yhXNGrrBTQxp3ib9BLAWs7k2YKBQsFRkZG9A== - dependencies: - is-alphabetical "^1.0.0" - is-decimal "^1.0.0" - is-alphanumerical@^2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz" @@ -5841,7 +6059,7 @@ is-alphanumerical@^2.0.0: is-arrayish@^0.2.1: version "0.2.1" - resolved "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== is-arrayish@^0.3.1: @@ -5861,13 +6079,6 @@ is-buffer@^2.0.0: resolved "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz" integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== -is-ci@^2.0.0: - version "2.0.0" - resolved 
"https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz" - integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== - dependencies: - ci-info "^2.0.0" - is-ci@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-3.0.1.tgz#db6ecbed1bd659c43dac0f45661e7674103d1867" @@ -5887,17 +6098,12 @@ is-color-stop@^1.1.0: rgb-regex "^1.0.1" rgba-regex "^1.0.0" -is-core-module@^2.9.0: - version "2.11.0" - resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.11.0.tgz" - integrity sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw== +is-core-module@^2.13.0, is-core-module@^2.9.0: + version "2.15.1" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.15.1.tgz#a7363a25bee942fefab0de13bf6aa372c82dcc37" + integrity sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ== dependencies: - has "^1.0.3" - -is-decimal@^1.0.0: - version "1.0.4" - resolved "https://registry.npmjs.org/is-decimal/-/is-decimal-1.0.4.tgz" - integrity sha512-RGdriMmQQvZ2aqaQq3awNA6dCGtKpiDFcOzrTWrDAT2MiWrKQVPmxLGHl7Y2nNu6led0kEyoX0enY0qXYsv9zw== + hasown "^2.0.2" is-decimal@^2.0.0: version "2.0.1" @@ -5931,11 +6137,6 @@ is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: dependencies: is-extglob "^2.1.1" -is-hexadecimal@^1.0.0: - version "1.0.4" - resolved "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-1.0.4.tgz" - integrity sha512-gyPJuv83bHMpocVYoqof5VDiZveEoGoFL8m3BXNb2VW8Xs+rz9kqO8LOQ5DH6EsuvilT1ApazU0pyl+ytbPtlw== - is-hexadecimal@^2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz" @@ -5954,11 +6155,6 @@ is-interactive@^2.0.0: resolved "https://registry.yarnpkg.com/is-interactive/-/is-interactive-2.0.0.tgz#40c57614593826da1100ade6059778d597f16e90" integrity sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ== -is-npm@^5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/is-npm/-/is-npm-5.0.0.tgz" - integrity sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA== - is-npm@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-6.0.0.tgz#b59e75e8915543ca5d881ecff864077cba095261" @@ -6011,13 +6207,6 @@ is-plain-object@^2.0.4: dependencies: isobject "^3.0.1" -is-reference@^3.0.0: - version "3.0.1" - resolved "https://registry.npmjs.org/is-reference/-/is-reference-3.0.1.tgz" - integrity sha512-baJJdQLiYaJdvFbJqXrcGv3WU3QCzBlUcI5QhbesIm6/xPsvmO+2CDoi/GMOFBQEQm+PXkwOPrp9KK5ozZsp2w== - dependencies: - "@types/estree" "*" - is-regexp@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/is-regexp/-/is-regexp-1.0.0.tgz" @@ -6048,16 +6237,6 @@ is-unicode-supported@^1.1.0, is-unicode-supported@^1.3.0: resolved "https://registry.yarnpkg.com/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz#d824984b616c292a2e198207d4a609983842f714" integrity sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ== -is-whitespace-character@^1.0.0: - version "1.0.4" - resolved "https://registry.npmjs.org/is-whitespace-character/-/is-whitespace-character-1.0.4.tgz" - integrity sha512-SDweEzfIZM0SJV0EUga669UTKlmL0Pq8Lno0QDQsPnvECB3IM2aP0gdx5TrU0A01MAPfViaZiI2V1QMZLaKK5w== - -is-word-character@^1.0.0: - version "1.0.4" - resolved "https://registry.npmjs.org/is-word-character/-/is-word-character-1.0.4.tgz" - integrity 
sha512-5SMO8RVennx3nZrqtKwCGyyetPE9VDba5ugvKLaD4KopPG5kR4mQ7tNt/r7feL5yt5h3lpuBbIUmCOG2eSzXHA== - is-wsl@^2.2.0: version "2.2.0" resolved "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz" @@ -6065,11 +6244,6 @@ is-wsl@^2.2.0: dependencies: is-docker "^2.0.0" -is-yarn-global@^0.3.0: - version "0.3.0" - resolved "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz" - integrity sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw== - is-yarn-global@^0.4.0: version "0.4.1" resolved "https://registry.yarnpkg.com/is-yarn-global/-/is-yarn-global-0.4.1.tgz#b312d902b313f81e4eaf98b6361ba2b45cd694bb" @@ -6100,7 +6274,19 @@ java-properties@^1.0.2: resolved "https://registry.yarnpkg.com/java-properties/-/java-properties-1.0.2.tgz#ccd1fa73907438a5b5c38982269d0e771fe78211" integrity sha512-qjdpeo2yKlYTH7nFdK0vbZWuTCesk4o63v5iVOlhMQPfuIZQfW/HI35SjfhA+4qpg36rnFSvUK5b1m+ckIblQQ== -jest-worker@^27.4.5, jest-worker@^27.5.1: +jest-util@^29.7.0: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.7.0.tgz#23c2b62bfb22be82b44de98055802ff3710fc0bc" + integrity sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA== + dependencies: + "@jest/types" "^29.6.3" + "@types/node" "*" + chalk "^4.0.0" + ci-info "^3.2.0" + graceful-fs "^4.2.9" + picomatch "^2.2.3" + +jest-worker@^27.4.5: version "27.5.1" resolved "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz" integrity sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg== @@ -6109,15 +6295,30 @@ jest-worker@^27.4.5, jest-worker@^27.5.1: merge-stream "^2.0.0" supports-color "^8.0.0" -joi@^17.6.0: - version "17.6.0" - resolved "https://registry.npmjs.org/joi/-/joi-17.6.0.tgz" - integrity sha512-OX5dG6DTbcr/kbMFj0KGYxuew69HPcAE3K/sZpEV2nP6e/j/C0HV+HNiBPCASxdx5T7DMoa0s8UeHWMnb6n2zw== +jest-worker@^29.4.3: + version "29.7.0" + resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-29.7.0.tgz#acad073acbbaeb7262bd5389e1bcf43e10058d4a" + integrity sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw== dependencies: - "@hapi/hoek" "^9.0.0" - "@hapi/topo" "^5.0.0" - "@sideway/address" "^4.1.3" - "@sideway/formula" "^3.0.0" + "@types/node" "*" + jest-util "^29.7.0" + merge-stream "^2.0.0" + supports-color "^8.0.0" + +jiti@^1.20.0: + version "1.21.6" + resolved "https://registry.yarnpkg.com/jiti/-/jiti-1.21.6.tgz#6c7f7398dd4b3142767f9a168af2f317a428d268" + integrity sha512-2yTgeWTWzMWkHu6Jp9NKgePDaYHbntiwvYuuJLbbN9vl7DC9DvXKOB2BC3ZZ92D3cvV/aflH0osDfwpHepQ53w== + +joi@^17.9.2: + version "17.13.3" + resolved "https://registry.yarnpkg.com/joi/-/joi-17.13.3.tgz#0f5cc1169c999b30d344366d384b12d92558bcec" + integrity sha512-otDA4ldcIx+ZXsKHWmp0YizCweVRZG96J10b0FevjfuncLO1oX59THoAmHkNubYJ+9gWsYsp5k8v4ib6oDv1fA== + dependencies: + "@hapi/hoek" "^9.3.0" + "@hapi/topo" "^5.1.0" + "@sideway/address" "^4.1.5" + "@sideway/formula" "^3.0.1" "@sideway/pinpoint" "^2.0.0" "js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: @@ -6140,21 +6341,16 @@ js-yaml@^4.1.0: dependencies: argparse "^2.0.1" -jsesc@^2.5.1: - version "2.5.2" - resolved "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== +jsesc@^3.0.2, jsesc@~3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-3.0.2.tgz#bb8b09a6597ba426425f2e4a07245c3d00b9343e" + integrity 
sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g== jsesc@~0.5.0: version "0.5.0" resolved "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz" integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== -json-buffer@3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz" - integrity sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ== - json-buffer@3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" @@ -6175,7 +6371,7 @@ json-schema-traverse@^1.0.0: resolved "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz" integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== -json5@^2.1.2, json5@^2.2.2: +json5@^2.1.2, json5@^2.2.3: version "2.2.3" resolved "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz" integrity sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg== @@ -6189,13 +6385,6 @@ jsonfile@^6.0.1: optionalDependencies: graceful-fs "^4.1.6" -keyv@^3.0.0: - version "3.1.0" - resolved "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz" - integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA== - dependencies: - json-buffer "3.0.0" - keyv@^4.5.3: version "4.5.4" resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93" @@ -6213,23 +6402,6 @@ kleur@^3.0.3: resolved "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz" integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== -kleur@^4.0.3: - version "4.1.5" - resolved "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz" - integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ== - -klona@^2.0.5: - version "2.0.5" - resolved "https://registry.npmjs.org/klona/-/klona-2.0.5.tgz" - integrity sha512-pJiBpiXMbt7dkzXe8Ghj/u4FfXOOa98fPW+bihOJ4SjnoijweJrNThJfd3ifXpXhREjpoF2mZVH1GfS9LV3kHQ== - -latest-version@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz" - integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA== - dependencies: - package-json "^6.3.0" - latest-version@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-7.0.0.tgz#843201591ea81a4d404932eeb61240fe04e9e5da" @@ -6237,19 +6409,32 @@ latest-version@^7.0.0: dependencies: package-json "^8.1.0" +launch-editor@^2.6.0: + version "2.9.1" + resolved "https://registry.yarnpkg.com/launch-editor/-/launch-editor-2.9.1.tgz#253f173bd441e342d4344b4dae58291abb425047" + integrity sha512-Gcnl4Bd+hRO9P9icCP/RVVT2o8SFlPXofuCxvA2SaZuH45whSvf5p8x5oih5ftLiVhEI4sp5xDY+R+b3zJBh5w== + dependencies: + picocolors "^1.0.0" + shell-quote "^1.8.1" + leven@^3.1.0: version "3.1.0" resolved "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz" integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== -lilconfig@^2.0.3, lilconfig@^2.0.5: +lilconfig@^2.0.5: version "2.0.6" resolved "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.6.tgz" integrity sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg== 
+lilconfig@^3.1.1: + version "3.1.2" + resolved "https://registry.yarnpkg.com/lilconfig/-/lilconfig-3.1.2.tgz#e4a7c3cb549e3a606c8dcc32e5ae1005e62c05cb" + integrity sha512-eop+wDAvpItUys0FWkHIKeC9ybYrTGbU41U5K7+bttZZeohvnY7M9dZ5kB21GNWiFT2q1OoPTvncPCgSOVO5ow== + lines-and-columns@^1.1.6: version "1.2.4" - resolved "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz" + resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.2.4.tgz#eca284f75d2965079309dc0ad9255abb2ebc1632" integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg== loader-runner@^4.2.0: @@ -6279,13 +6464,6 @@ locate-path@^3.0.0: p-locate "^3.0.0" path-exists "^3.0.0" -locate-path@^5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz" - integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== - dependencies: - p-locate "^4.1.0" - locate-path@^6.0.0: version "6.0.0" resolved "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz" @@ -6293,26 +6471,23 @@ locate-path@^6.0.0: dependencies: p-locate "^5.0.0" +locate-path@^7.1.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-7.2.0.tgz#69cb1779bd90b35ab1e771e1f2f89a202c2a8a8a" + integrity sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA== + dependencies: + p-locate "^6.0.0" + lodash.clonedeep@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef" integrity sha512-H5ZhCF25riFd9uB5UCkVKo61m3S/xZk1x4wA6yp/L3RFP6Z/eHH1ymQcGLo7J3GMPfm0V/7m1tryHuGVxpqEBQ== -lodash.curry@^4.0.1: - version "4.1.1" - resolved "https://registry.npmjs.org/lodash.curry/-/lodash.curry-4.1.1.tgz" - integrity sha512-/u14pXGviLaweY5JI0IUzgzF2J6Ne8INyzAZjImcryjgkZ+ebruBxy2/JaOOkTqScddcYtakjhSaeemV8lR0tA== - lodash.debounce@^4.0.8: version "4.0.8" resolved "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz" integrity sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow== -lodash.flow@^3.3.0: - version "3.5.0" - resolved "https://registry.npmjs.org/lodash.flow/-/lodash.flow-3.5.0.tgz" - integrity sha512-ff3BX/tSioo+XojX4MOsOMhJw0nZoUEF011LX8g8d3gvjVbxd89cCio4BCXronjxcTUIJUoqKEUA+n4CqvvRPw== - lodash.memoize@^4.1.2: version "4.1.2" resolved "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz" @@ -6323,12 +6498,12 @@ lodash.topath@^4.5.2: resolved "https://registry.npmjs.org/lodash.topath/-/lodash.topath-4.5.2.tgz" integrity sha512-1/W4dM+35DwvE/iEd1M9ekewOSTlpFekhw9mhAtrwjVqUr83/ilQiyAvmg4tVX7Unkcfl1KC+i9WdaT4B6aQcg== -lodash.uniq@4.5.0, lodash.uniq@^4.5.0: +lodash.uniq@^4.5.0: version "4.5.0" resolved "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz" integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ== -lodash@^4.17.19, lodash@^4.17.20, lodash@^4.17.21: +lodash@^4.17.20, lodash@^4.17.21: version "4.17.21" resolved "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz" integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== @@ -6360,16 +6535,6 @@ lower-case@^2.0.2: dependencies: tslib "^2.0.3" -lowercase-keys@^1.0.0, lowercase-keys@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz" - integrity 
sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA== - -lowercase-keys@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz" - integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== - lowercase-keys@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-3.0.0.tgz#c5e7d442e37ead247ae9db117a9d0a467c89d4f2" @@ -6399,186 +6564,254 @@ lunr@^2.3.8: resolved "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz" integrity sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow== -make-dir@^3.0.0, make-dir@^3.0.2, make-dir@^3.1.0: - version "3.1.0" - resolved "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz" - integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== +mark.js@^8.11.1: + version "8.11.1" + resolved "https://registry.yarnpkg.com/mark.js/-/mark.js-8.11.1.tgz#180f1f9ebef8b0e638e4166ad52db879beb2ffc5" + integrity sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ== + +markdown-extensions@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/markdown-extensions/-/markdown-extensions-2.0.0.tgz#34bebc83e9938cae16e0e017e4a9814a8330d3c4" + integrity sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q== + +markdown-table@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/markdown-table/-/markdown-table-2.0.0.tgz#194a90ced26d31fe753d8b9434430214c011865b" + integrity sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A== dependencies: - semver "^6.0.0" + repeat-string "^1.0.0" -markdown-escapes@^1.0.0: - version "1.0.4" - resolved "https://registry.npmjs.org/markdown-escapes/-/markdown-escapes-1.0.4.tgz" - integrity sha512-8z4efJYk43E0upd0NbVXwgSTQs6cT3T06etieCMEg7dRbzCbxUCK/GHlX8mhHRDcp+OLlHkPKsvqQTCvsRl2cg== +markdown-table@^3.0.0: + version "3.0.4" + resolved "https://registry.yarnpkg.com/markdown-table/-/markdown-table-3.0.4.tgz#fe44d6d410ff9d6f2ea1797a3f60aa4d2b631c2a" + integrity sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw== -markdown-extensions@^1.0.0: - version "1.1.1" - resolved "https://registry.npmjs.org/markdown-extensions/-/markdown-extensions-1.1.1.tgz" - integrity sha512-WWC0ZuMzCyDHYCasEGs4IPvLyTGftYwh6wIEOULOF0HXcqZlhwRzrK0w2VUlxWA98xnvb/jszw4ZSkJ6ADpM6Q== +mdast-util-directive@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-directive/-/mdast-util-directive-3.0.0.tgz#3fb1764e705bbdf0afb0d3f889e4404c3e82561f" + integrity sha512-JUpYOqKI4mM3sZcNxmF/ox04XYFFkNwr0CFlrQIkCwbvH0xzMCqkMqAde9wRd80VAhaUrwFwKm2nxretdT1h7Q== + dependencies: + "@types/mdast" "^4.0.0" + "@types/unist" "^3.0.0" + devlop "^1.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + parse-entities "^4.0.0" + stringify-entities "^4.0.0" + unist-util-visit-parents "^6.0.0" -mdast-squeeze-paragraphs@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/mdast-squeeze-paragraphs/-/mdast-squeeze-paragraphs-4.0.0.tgz" - integrity sha512-zxdPn69hkQ1rm4J+2Cs2j6wDEv7O17TfXTJ33tl/+JPIoEmtV9t2ZzBM5LPHE8QlHsmVD8t3vPKCyY3oH+H8MQ== +mdast-util-find-and-replace@^3.0.0, mdast-util-find-and-replace@^3.0.1: + version "3.0.1" + resolved 
"https://registry.yarnpkg.com/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.1.tgz#a6fc7b62f0994e973490e45262e4bc07607b04e0" + integrity sha512-SG21kZHGC3XRTSUhtofZkBzZTJNM5ecCi0SK2IMKmSXR8vO3peL+kb1O0z7Zl83jKtutG4k5Wv/W7V3/YHvzPA== dependencies: - unist-util-remove "^2.0.0" + "@types/mdast" "^4.0.0" + escape-string-regexp "^5.0.0" + unist-util-is "^6.0.0" + unist-util-visit-parents "^6.0.0" -mdast-util-definitions@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-4.0.0.tgz" - integrity sha512-k8AJ6aNnUkB7IE+5azR9h81O5EQ/cTDXtWdMq9Kk5KcEW/8ritU5CeLg/9HhOC++nALHBlaogJ5jz0Ybk3kPMQ== +mdast-util-from-markdown@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz#4850390ca7cf17413a9b9a0fbefcd1bc0eb4160a" + integrity sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA== dependencies: - unist-util-visit "^2.0.0" + "@types/mdast" "^4.0.0" + "@types/unist" "^3.0.0" + decode-named-character-reference "^1.0.0" + devlop "^1.0.0" + mdast-util-to-string "^4.0.0" + micromark "^4.0.0" + micromark-util-decode-numeric-character-reference "^2.0.0" + micromark-util-decode-string "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + unist-util-stringify-position "^4.0.0" + +mdast-util-frontmatter@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/mdast-util-frontmatter/-/mdast-util-frontmatter-2.0.1.tgz#f5f929eb1eb36c8a7737475c7eb438261f964ee8" + integrity sha512-LRqI9+wdgC25P0URIJY9vwocIzCcksduHQ9OF2joxQoyTNVduwLAFUzjoopuRJbJAReaKrNQKAZKL3uCMugWJA== + dependencies: + "@types/mdast" "^4.0.0" + devlop "^1.0.0" + escape-string-regexp "^5.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + micromark-extension-frontmatter "^2.0.0" -mdast-util-definitions@^5.0.0: - version "5.1.2" - resolved "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-5.1.2.tgz" - integrity sha512-8SVPMuHqlPME/z3gqVwWY4zVXn8lqKv/pAhC57FuJ40ImXyBpmO5ukh98zB2v7Blql2FiHjHv9LVztSIqjY+MA== +mdast-util-gfm-autolink-literal@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz#abd557630337bd30a6d5a4bd8252e1c2dc0875d5" + integrity sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ== dependencies: - "@types/mdast" "^3.0.0" - "@types/unist" "^2.0.0" - unist-util-visit "^4.0.0" + "@types/mdast" "^4.0.0" + ccount "^2.0.0" + devlop "^1.0.0" + mdast-util-find-and-replace "^3.0.0" + micromark-util-character "^2.0.0" -mdast-util-from-markdown@^1.0.0, mdast-util-from-markdown@^1.1.0: - version "1.3.0" - resolved "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.0.tgz" - integrity sha512-HN3W1gRIuN/ZW295c7zi7g9lVBllMgZE40RxCX37wrTPWXCWtpvOZdfnuK+1WNpvZje6XuJeI3Wnb4TJEUem+g== +mdast-util-gfm-footnote@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.0.0.tgz#25a1753c7d16db8bfd53cd84fe50562bd1e6d6a9" + integrity sha512-5jOT2boTSVkMnQ7LTrd6n/18kqwjmuYqo7JUPe+tRCY6O7dAuTFMtTPauYYrMPpox9hlN0uOx/FL8XvEfG9/mQ== dependencies: - "@types/mdast" "^3.0.0" - "@types/unist" "^2.0.0" - decode-named-character-reference "^1.0.0" - mdast-util-to-string "^3.1.0" - micromark "^3.0.0" - 
micromark-util-decode-numeric-character-reference "^1.0.0" - micromark-util-decode-string "^1.0.0" - micromark-util-normalize-identifier "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.0" - unist-util-stringify-position "^3.0.0" - uvu "^0.5.0" + "@types/mdast" "^4.0.0" + devlop "^1.1.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" -mdast-util-mdx-expression@^1.0.0: - version "1.3.2" - resolved "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-1.3.2.tgz" - integrity sha512-xIPmR5ReJDu/DHH1OoIT1HkuybIfRGYRywC+gJtI7qHjCJp/M9jrmBEJW22O8lskDWm562BX2W8TiAwRTb0rKA== +mdast-util-gfm-strikethrough@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz#d44ef9e8ed283ac8c1165ab0d0dfd058c2764c16" + integrity sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg== dependencies: - "@types/estree-jsx" "^1.0.0" - "@types/hast" "^2.0.0" - "@types/mdast" "^3.0.0" - mdast-util-from-markdown "^1.0.0" - mdast-util-to-markdown "^1.0.0" + "@types/mdast" "^4.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" -mdast-util-mdx-jsx@^2.0.0: - version "2.1.2" - resolved "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-2.1.2.tgz" - integrity sha512-o9vBCYQK5ZLGEj3tCGISJGjvafyHRVJlZmfJzSE7xjiogSzIeph/Z4zMY65q4WGRMezQBeAwPlrdymDYYYx0tA== +mdast-util-gfm-table@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz#7a435fb6223a72b0862b33afbd712b6dae878d38" + integrity sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg== dependencies: - "@types/estree-jsx" "^1.0.0" - "@types/hast" "^2.0.0" - "@types/mdast" "^3.0.0" - "@types/unist" "^2.0.0" - ccount "^2.0.0" - mdast-util-from-markdown "^1.1.0" - mdast-util-to-markdown "^1.3.0" - parse-entities "^4.0.0" - stringify-entities "^4.0.0" - unist-util-remove-position "^4.0.0" - unist-util-stringify-position "^3.0.0" - vfile-message "^3.0.0" + "@types/mdast" "^4.0.0" + devlop "^1.0.0" + markdown-table "^3.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" -mdast-util-mdx@^2.0.0: - version "2.0.1" - resolved "https://registry.npmjs.org/mdast-util-mdx/-/mdast-util-mdx-2.0.1.tgz" - integrity sha512-38w5y+r8nyKlGvNjSEqWrhG0w5PmnRA+wnBvm+ulYCct7nsGYhFVb0lljS9bQav4psDAS1eGkP2LMVcZBi/aqw== +mdast-util-gfm-task-list-item@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz#e68095d2f8a4303ef24094ab642e1047b991a936" + integrity sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ== dependencies: - mdast-util-from-markdown "^1.0.0" - mdast-util-mdx-expression "^1.0.0" - mdast-util-mdx-jsx "^2.0.0" - mdast-util-mdxjs-esm "^1.0.0" - mdast-util-to-markdown "^1.0.0" + "@types/mdast" "^4.0.0" + devlop "^1.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" -mdast-util-mdxjs-esm@^1.0.0: - version "1.3.1" - resolved "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-1.3.1.tgz" - integrity sha512-SXqglS0HrEvSdUEfoXFtcg7DRl7S2cwOXc7jkuusG472Mmjag34DUDeOJUZtl+BVnyeO1frIgVpHlNRWc2gk/w== +mdast-util-gfm@^3.0.0: + version "3.0.0" + resolved 
"https://registry.yarnpkg.com/mdast-util-gfm/-/mdast-util-gfm-3.0.0.tgz#3f2aecc879785c3cb6a81ff3a243dc11eca61095" + integrity sha512-dgQEX5Amaq+DuUqf26jJqSK9qgixgd6rYDHAv4aTBuA92cTknZlKpPfa86Z/s8Dj8xsAQpFfBmPUHWJBWqS4Bw== + dependencies: + mdast-util-from-markdown "^2.0.0" + mdast-util-gfm-autolink-literal "^2.0.0" + mdast-util-gfm-footnote "^2.0.0" + mdast-util-gfm-strikethrough "^2.0.0" + mdast-util-gfm-table "^2.0.0" + mdast-util-gfm-task-list-item "^2.0.0" + mdast-util-to-markdown "^2.0.0" + +mdast-util-mdx-expression@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz#43f0abac9adc756e2086f63822a38c8d3c3a5096" + integrity sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ== dependencies: "@types/estree-jsx" "^1.0.0" - "@types/hast" "^2.0.0" - "@types/mdast" "^3.0.0" - mdast-util-from-markdown "^1.0.0" - mdast-util-to-markdown "^1.0.0" + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + devlop "^1.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" -mdast-util-phrasing@^3.0.0: - version "3.0.1" - resolved "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz" - integrity sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg== +mdast-util-mdx-jsx@^3.0.0: + version "3.1.3" + resolved "https://registry.yarnpkg.com/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.1.3.tgz#76b957b3da18ebcfd0de3a9b4451dcd6fdec2320" + integrity sha512-bfOjvNt+1AcbPLTFMFWY149nJz0OjmewJs3LQQ5pIyVGxP4CdOqNVJL6kTaM5c68p8q82Xv3nCyFfUnuEcH3UQ== dependencies: - "@types/mdast" "^3.0.0" - unist-util-is "^5.0.0" + "@types/estree-jsx" "^1.0.0" + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + "@types/unist" "^3.0.0" + ccount "^2.0.0" + devlop "^1.1.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + parse-entities "^4.0.0" + stringify-entities "^4.0.0" + unist-util-stringify-position "^4.0.0" + vfile-message "^4.0.0" -mdast-util-to-hast@10.0.1: - version "10.0.1" - resolved "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-10.0.1.tgz" - integrity sha512-BW3LM9SEMnjf4HXXVApZMt8gLQWVNXc3jryK0nJu/rOXPOnlkUjmdkDlmxMirpbU9ILncGFIwLH/ubnWBbcdgA== +mdast-util-mdx@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-mdx/-/mdast-util-mdx-3.0.0.tgz#792f9cf0361b46bee1fdf1ef36beac424a099c41" + integrity sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w== dependencies: - "@types/mdast" "^3.0.0" - "@types/unist" "^2.0.0" - mdast-util-definitions "^4.0.0" - mdurl "^1.0.0" - unist-builder "^2.0.0" - unist-util-generated "^1.0.0" - unist-util-position "^3.0.0" - unist-util-visit "^2.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-mdx-expression "^2.0.0" + mdast-util-mdx-jsx "^3.0.0" + mdast-util-mdxjs-esm "^2.0.0" + mdast-util-to-markdown "^2.0.0" -mdast-util-to-hast@^12.1.0: - version "12.3.0" - resolved "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-12.3.0.tgz" - integrity sha512-pits93r8PhnIoU4Vy9bjW39M2jJ6/tdHyja9rrot9uujkN7UTU9SDnE6WNJz/IGyQk3XHX6yNNtrBH6cQzm8Hw== +mdast-util-mdxjs-esm@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz#019cfbe757ad62dd557db35a695e7314bcc9fa97" + integrity sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg== dependencies: - "@types/hast" "^2.0.0" - 
"@types/mdast" "^3.0.0" - mdast-util-definitions "^5.0.0" - micromark-util-sanitize-uri "^1.1.0" + "@types/estree-jsx" "^1.0.0" + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + devlop "^1.0.0" + mdast-util-from-markdown "^2.0.0" + mdast-util-to-markdown "^2.0.0" + +mdast-util-phrasing@^4.0.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz#7cc0a8dec30eaf04b7b1a9661a92adb3382aa6e3" + integrity sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w== + dependencies: + "@types/mdast" "^4.0.0" + unist-util-is "^6.0.0" + +mdast-util-to-hast@^13.0.0: + version "13.2.0" + resolved "https://registry.yarnpkg.com/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz#5ca58e5b921cc0a3ded1bc02eed79a4fe4fe41f4" + integrity sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA== + dependencies: + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + "@ungap/structured-clone" "^1.0.0" + devlop "^1.0.0" + micromark-util-sanitize-uri "^2.0.0" trim-lines "^3.0.0" - unist-util-generated "^2.0.0" - unist-util-position "^4.0.0" - unist-util-visit "^4.0.0" + unist-util-position "^5.0.0" + unist-util-visit "^5.0.0" + vfile "^6.0.0" -mdast-util-to-markdown@^1.0.0, mdast-util-to-markdown@^1.3.0: - version "1.5.0" - resolved "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz" - integrity sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A== +mdast-util-to-markdown@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.1.tgz#6fdb72cd54ee4e6745e138db003609978a322e94" + integrity sha512-OrkcCoqAkEg9b1ykXBrA0ehRc8H4fGU/03cACmW2xXzau1+dIdS+qJugh1Cqex3hMumSBgSE/5pc7uqP12nLAw== dependencies: - "@types/mdast" "^3.0.0" - "@types/unist" "^2.0.0" + "@types/mdast" "^4.0.0" + "@types/unist" "^3.0.0" longest-streak "^3.0.0" - mdast-util-phrasing "^3.0.0" - mdast-util-to-string "^3.0.0" - micromark-util-decode-string "^1.0.0" - unist-util-visit "^4.0.0" + mdast-util-phrasing "^4.0.0" + mdast-util-to-string "^4.0.0" + micromark-util-classify-character "^2.0.0" + micromark-util-decode-string "^2.0.0" + unist-util-visit "^5.0.0" zwitch "^2.0.0" -mdast-util-to-string@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-2.0.0.tgz" - integrity sha512-AW4DRS3QbBayY/jJmD8437V1Gombjf8RSOUCMFBuo5iHi58AGEgVCKQ+ezHkZZDpAQS75hcBMpLqjpJTjtUL7w== - -mdast-util-to-string@^3.0.0, mdast-util-to-string@^3.1.0: - version "3.1.1" - resolved "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.1.1.tgz" - integrity sha512-tGvhT94e+cVnQt8JWE9/b3cUQZWS732TJxXHktvP+BYo62PpYD53Ls/6cC60rW21dW+txxiM4zMdc6abASvZKA== +mdast-util-to-string@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz#7a5121475556a04e7eddeb67b264aae79d312814" + integrity sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg== dependencies: - "@types/mdast" "^3.0.0" + "@types/mdast" "^4.0.0" -mdn-data@2.0.14: - version "2.0.14" - resolved "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz" - integrity sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow== +mdn-data@2.0.28: + version "2.0.28" + resolved 
"https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.28.tgz#5ec48e7bef120654539069e1ae4ddc81ca490eba" + integrity sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g== -mdurl@^1.0.0: - version "1.0.1" - resolved "https://registry.npmjs.org/mdurl/-/mdurl-1.0.1.tgz" - integrity sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g== +mdn-data@2.0.30: + version "2.0.30" + resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.30.tgz#ce4df6f80af6cfbe218ecd5c552ba13c4dfa08cc" + integrity sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA== media-typer@0.3.0: version "0.3.0" @@ -6612,123 +6845,230 @@ methods@~1.1.2: resolved "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz" integrity sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w== -micromark-core-commonmark@^1.0.0, micromark-core-commonmark@^1.0.1: - version "1.0.6" - resolved "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.0.6.tgz" - integrity sha512-K+PkJTxqjFfSNkfAhp4GB+cZPfQd6dxtTXnf+RjZOV7T4EEXnvgzOcnp+eSTmpGk9d1S9sL6/lqrgSNn/s0HZA== +micromark-core-commonmark@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-core-commonmark/-/micromark-core-commonmark-2.0.1.tgz#9a45510557d068605c6e9a80f282b2bb8581e43d" + integrity sha512-CUQyKr1e///ZODyD1U3xit6zXwy1a8q2a1S1HKtIlmgvurrEpaw/Y9y6KSIbF8P59cn/NjzHyO+Q2fAyYLQrAA== dependencies: decode-named-character-reference "^1.0.0" - micromark-factory-destination "^1.0.0" - micromark-factory-label "^1.0.0" - micromark-factory-space "^1.0.0" - micromark-factory-title "^1.0.0" - micromark-factory-whitespace "^1.0.0" - micromark-util-character "^1.0.0" - micromark-util-chunked "^1.0.0" - micromark-util-classify-character "^1.0.0" - micromark-util-html-tag-name "^1.0.0" - micromark-util-normalize-identifier "^1.0.0" - micromark-util-resolve-all "^1.0.0" - micromark-util-subtokenize "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.1" - uvu "^0.5.0" + devlop "^1.0.0" + micromark-factory-destination "^2.0.0" + micromark-factory-label "^2.0.0" + micromark-factory-space "^2.0.0" + micromark-factory-title "^2.0.0" + micromark-factory-whitespace "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-chunked "^2.0.0" + micromark-util-classify-character "^2.0.0" + micromark-util-html-tag-name "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" + micromark-util-resolve-all "^2.0.0" + micromark-util-subtokenize "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-extension-directive@^3.0.0: + version "3.0.2" + resolved "https://registry.yarnpkg.com/micromark-extension-directive/-/micromark-extension-directive-3.0.2.tgz#2eb61985d1995a7c1ff7621676a4f32af29409e8" + integrity sha512-wjcXHgk+PPdmvR58Le9d7zQYWy+vKEU9Se44p2CrCDPiLr2FMyiT4Fyb5UFKFC66wGB3kPlgD7q3TnoqPS7SZA== + dependencies: + devlop "^1.0.0" + micromark-factory-space "^2.0.0" + micromark-factory-whitespace "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + parse-entities "^4.0.0" -micromark-extension-mdx-expression@^1.0.0: - version "1.0.4" - resolved "https://registry.npmjs.org/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-1.0.4.tgz" - integrity sha512-TCgLxqW6ReQ3AJgtj1P0P+8ZThBTloLbeb7jNaqr6mCOLDpxUiBFE/9STgooMZttEwOQu5iEcCCa3ZSDhY9FGw== 
+micromark-extension-frontmatter@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-extension-frontmatter/-/micromark-extension-frontmatter-2.0.0.tgz#651c52ffa5d7a8eeed687c513cd869885882d67a" + integrity sha512-C4AkuM3dA58cgZha7zVnuVxBhDsbttIMiytjgsM2XbHAB2faRVaHRle40558FBN+DJcrLNCoqG5mlrpdU4cRtg== dependencies: - micromark-factory-mdx-expression "^1.0.0" - micromark-factory-space "^1.0.0" - micromark-util-character "^1.0.0" - micromark-util-events-to-acorn "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.0" - uvu "^0.5.0" + fault "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" -micromark-extension-mdx-jsx@^1.0.0: - version "1.0.3" - resolved "https://registry.npmjs.org/micromark-extension-mdx-jsx/-/micromark-extension-mdx-jsx-1.0.3.tgz" - integrity sha512-VfA369RdqUISF0qGgv2FfV7gGjHDfn9+Qfiv5hEwpyr1xscRj/CiVRkU7rywGFCO7JwJ5L0e7CJz60lY52+qOA== +micromark-extension-gfm-autolink-literal@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz#6286aee9686c4462c1e3552a9d505feddceeb935" + integrity sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw== dependencies: - "@types/acorn" "^4.0.0" - estree-util-is-identifier-name "^2.0.0" - micromark-factory-mdx-expression "^1.0.0" - micromark-factory-space "^1.0.0" - micromark-util-character "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.0" - uvu "^0.5.0" - vfile-message "^3.0.0" + micromark-util-character "^2.0.0" + micromark-util-sanitize-uri "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" -micromark-extension-mdx-md@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/micromark-extension-mdx-md/-/micromark-extension-mdx-md-1.0.0.tgz" - integrity sha512-xaRAMoSkKdqZXDAoSgp20Azm0aRQKGOl0RrS81yGu8Hr/JhMsBmfs4wR7m9kgVUIO36cMUQjNyiyDKPrsv8gOw== +micromark-extension-gfm-footnote@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz#4dab56d4e398b9853f6fe4efac4fc9361f3e0750" + integrity sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw== + dependencies: + devlop "^1.0.0" + micromark-core-commonmark "^2.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" + micromark-util-sanitize-uri "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-extension-gfm-strikethrough@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz#86106df8b3a692b5f6a92280d3879be6be46d923" + integrity sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw== dependencies: - micromark-util-types "^1.0.0" + devlop "^1.0.0" + micromark-util-chunked "^2.0.0" + micromark-util-classify-character "^2.0.0" + micromark-util-resolve-all "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" -micromark-extension-mdxjs-esm@^1.0.0: - version "1.0.3" - resolved "https://registry.npmjs.org/micromark-extension-mdxjs-esm/-/micromark-extension-mdxjs-esm-1.0.3.tgz" - integrity sha512-2N13ol4KMoxb85rdDwTAC6uzs8lMX0zeqpcyx7FhS7PxXomOnLactu8WI8iBNXW8AVyea3KIJd/1CKnUmwrK9A== 
+micromark-extension-gfm-table@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.0.tgz#5cadedfbb29fca7abf752447967003dc3b6583c9" + integrity sha512-Ub2ncQv+fwD70/l4ou27b4YzfNaCJOvyX4HxXU15m7mpYY+rjuWzsLIPZHJL253Z643RpbcP1oeIJlQ/SKW67g== dependencies: - micromark-core-commonmark "^1.0.0" - micromark-util-character "^1.0.0" - micromark-util-events-to-acorn "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.0" - unist-util-position-from-estree "^1.1.0" - uvu "^0.5.0" - vfile-message "^3.0.0" + devlop "^1.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" -micromark-extension-mdxjs@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/micromark-extension-mdxjs/-/micromark-extension-mdxjs-1.0.0.tgz" - integrity sha512-TZZRZgeHvtgm+IhtgC2+uDMR7h8eTKF0QUX9YsgoL9+bADBpBY6SiLvWqnBlLbCEevITmTqmEuY3FoxMKVs1rQ== +micromark-extension-gfm-tagfilter@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz#f26d8a7807b5985fba13cf61465b58ca5ff7dc57" + integrity sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg== dependencies: - acorn "^8.0.0" - acorn-jsx "^5.0.0" - micromark-extension-mdx-expression "^1.0.0" - micromark-extension-mdx-jsx "^1.0.0" - micromark-extension-mdx-md "^1.0.0" - micromark-extension-mdxjs-esm "^1.0.0" - micromark-util-combine-extensions "^1.0.0" - micromark-util-types "^1.0.0" + micromark-util-types "^2.0.0" -micromark-factory-destination@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.0.0.tgz" - integrity sha512-eUBA7Rs1/xtTVun9TmV3gjfPz2wEwgK5R5xcbIM5ZYAtvGF6JkyaDsj0agx8urXnO31tEO6Ug83iVH3tdedLnw== +micromark-extension-gfm-task-list-item@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz#bcc34d805639829990ec175c3eea12bb5b781f2c" + integrity sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw== dependencies: - micromark-util-character "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.0" + devlop "^1.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" -micromark-factory-label@^1.0.0: - version "1.0.2" - resolved "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.0.2.tgz" - integrity sha512-CTIwxlOnU7dEshXDQ+dsr2n+yxpP0+fn271pu0bwDIS8uqfFcumXpj5mLn3hSC8iw2MUr6Gx8EcKng1dD7i6hg== +micromark-extension-gfm@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz#3e13376ab95dd7a5cfd0e29560dfe999657b3c5b" + integrity sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w== + dependencies: + micromark-extension-gfm-autolink-literal "^2.0.0" + micromark-extension-gfm-footnote "^2.0.0" + micromark-extension-gfm-strikethrough "^2.0.0" + micromark-extension-gfm-table "^2.0.0" + micromark-extension-gfm-tagfilter "^2.0.0" + micromark-extension-gfm-task-list-item "^2.0.0" + micromark-util-combine-extensions "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-extension-mdx-expression@^3.0.0: + 
version "3.0.0" + resolved "https://registry.yarnpkg.com/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-3.0.0.tgz#1407b9ce69916cf5e03a196ad9586889df25302a" + integrity sha512-sI0nwhUDz97xyzqJAbHQhp5TfaxEvZZZ2JDqUo+7NvyIYG6BZ5CPPqj2ogUoPJlmXHBnyZUzISg9+oUmU6tUjQ== dependencies: - micromark-util-character "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.0" - uvu "^0.5.0" + "@types/estree" "^1.0.0" + devlop "^1.0.0" + micromark-factory-mdx-expression "^2.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-events-to-acorn "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-extension-mdx-jsx@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/micromark-extension-mdx-jsx/-/micromark-extension-mdx-jsx-3.0.1.tgz#5abb83da5ddc8e473a374453e6ea56fbd66b59ad" + integrity sha512-vNuFb9czP8QCtAQcEJn0UJQJZA8Dk6DXKBqx+bg/w0WGuSxDxNr7hErW89tHUY31dUW4NqEOWwmEUNhjTFmHkg== + dependencies: + "@types/acorn" "^4.0.0" + "@types/estree" "^1.0.0" + devlop "^1.0.0" + estree-util-is-identifier-name "^3.0.0" + micromark-factory-mdx-expression "^2.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-events-to-acorn "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + vfile-message "^4.0.0" + +micromark-extension-mdx-md@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-extension-mdx-md/-/micromark-extension-mdx-md-2.0.0.tgz#1d252881ea35d74698423ab44917e1f5b197b92d" + integrity sha512-EpAiszsB3blw4Rpba7xTOUptcFeBFi+6PY8VnJ2hhimH+vCQDirWgsMpz7w1XcZE7LVrSAUGb9VJpG9ghlYvYQ== + dependencies: + micromark-util-types "^2.0.0" + +micromark-extension-mdxjs-esm@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/micromark-extension-mdxjs-esm/-/micromark-extension-mdxjs-esm-3.0.0.tgz#de21b2b045fd2059bd00d36746081de38390d54a" + integrity sha512-DJFl4ZqkErRpq/dAPyeWp15tGrcrrJho1hKK5uBS70BCtfrIFg81sqcTVu3Ta+KD1Tk5vAtBNElWxtAa+m8K9A== + dependencies: + "@types/estree" "^1.0.0" + devlop "^1.0.0" + micromark-core-commonmark "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-events-to-acorn "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + unist-util-position-from-estree "^2.0.0" + vfile-message "^4.0.0" + +micromark-extension-mdxjs@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/micromark-extension-mdxjs/-/micromark-extension-mdxjs-3.0.0.tgz#b5a2e0ed449288f3f6f6c544358159557549de18" + integrity sha512-A873fJfhnJ2siZyUrJ31l34Uqwy4xIFmvPY1oj+Ean5PHcPBYzEsvqvWGaWcfEIr11O5Dlw3p2y0tZWpKHDejQ== + dependencies: + acorn "^8.0.0" + acorn-jsx "^5.0.0" + micromark-extension-mdx-expression "^3.0.0" + micromark-extension-mdx-jsx "^3.0.0" + micromark-extension-mdx-md "^2.0.0" + micromark-extension-mdxjs-esm "^3.0.0" + micromark-util-combine-extensions "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-destination@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-factory-destination/-/micromark-factory-destination-2.0.0.tgz#857c94debd2c873cba34e0445ab26b74f6a6ec07" + integrity sha512-j9DGrQLm/Uhl2tCzcbLhy5kXsgkHUrjJHg4fFAeoMRwJmJerT9aw4FEhIbZStWN8A3qMwOp1uzHr4UL8AInxtA== + dependencies: + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-label@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/micromark-factory-label/-/micromark-factory-label-2.0.0.tgz#17c5c2e66ce39ad6f4fc4cbf40d972f9096f726a" + integrity sha512-RR3i96ohZGde//4WSe/dJsxOX6vxIg9TimLAS3i4EhBAFx8Sm5SmqVfR8E87DPSR31nEAjZfbt91OMZWcNgdZw== + dependencies: + devlop "^1.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" -micromark-factory-mdx-expression@^1.0.0: - version "1.0.7" - resolved "https://registry.npmjs.org/micromark-factory-mdx-expression/-/micromark-factory-mdx-expression-1.0.7.tgz" - integrity sha512-QAdFbkQagTZ/eKb8zDGqmjvgevgJH3+aQpvvKrXWxNJp3o8/l2cAbbrBd0E04r0Gx6nssPpqWIjnbHFvZu5qsQ== +micromark-factory-mdx-expression@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/micromark-factory-mdx-expression/-/micromark-factory-mdx-expression-2.0.2.tgz#2afaa8ba6d5f63e0cead3e4dee643cad184ca260" + integrity sha512-5E5I2pFzJyg2CtemqAbcyCktpHXuJbABnsb32wX2U8IQKhhVFBqkcZR5LRm1WVoFqa4kTueZK4abep7wdo9nrw== dependencies: - micromark-factory-space "^1.0.0" - micromark-util-character "^1.0.0" - micromark-util-events-to-acorn "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.0" - unist-util-position-from-estree "^1.0.0" - uvu "^0.5.0" - vfile-message "^3.0.0" + "@types/estree" "^1.0.0" + devlop "^1.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-events-to-acorn "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + unist-util-position-from-estree "^2.0.0" + vfile-message "^4.0.0" micromark-factory-space@^1.0.0: version "1.0.0" @@ -6738,26 +7078,33 @@ micromark-factory-space@^1.0.0: micromark-util-character "^1.0.0" micromark-util-types "^1.0.0" -micromark-factory-title@^1.0.0: - version "1.0.2" - resolved "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.0.2.tgz" - integrity sha512-zily+Nr4yFqgMGRKLpTVsNl5L4PMu485fGFDOQJQBl2NFpjGte1e86zC0da93wf97jrc4+2G2GQudFMHn3IX+A== +micromark-factory-space@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-factory-space/-/micromark-factory-space-2.0.0.tgz#5e7afd5929c23b96566d0e1ae018ae4fcf81d030" + integrity sha512-TKr+LIDX2pkBJXFLzpyPyljzYK3MtmllMUMODTQJIUfDGncESaqB90db9IAUcz4AZAJFdd8U9zOp9ty1458rxg== dependencies: - micromark-factory-space "^1.0.0" - micromark-util-character "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.0" - uvu "^0.5.0" + micromark-util-character "^2.0.0" + micromark-util-types "^2.0.0" -micromark-factory-whitespace@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.0.0.tgz" - integrity sha512-Qx7uEyahU1lt1RnsECBiuEbfr9INjQTGa6Err+gF3g0Tx4YEviPbqqGKNv/NrBaE7dVHdn1bVZKM/n5I/Bak7A== +micromark-factory-title@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-factory-title/-/micromark-factory-title-2.0.0.tgz#726140fc77892af524705d689e1cf06c8a83ea95" + integrity sha512-jY8CSxmpWLOxS+t8W+FG3Xigc0RDQA9bKMY/EwILvsesiRniiVMejYTE4wumNc2f4UbAa4WsHqe3J1QS1sli+A== dependencies: - micromark-factory-space "^1.0.0" - micromark-util-character "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-factory-whitespace@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.0.tgz#9e92eb0f5468083381f923d9653632b3cfb5f763" + integrity sha512-28kbwaBjc5yAI1XadbdPYHX/eDnqaUFVikLwrO7FDnKG7lpgxnvk/XGRhX/PN0mOZ+dBSZ+LgunHS+6tYQAzhA== + dependencies: + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" micromark-util-character@^1.0.0: version "1.1.0" @@ -6767,135 +7114,167 @@ micromark-util-character@^1.0.0: micromark-util-symbol "^1.0.0" micromark-util-types "^1.0.0" -micromark-util-chunked@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.0.0.tgz" - integrity sha512-5e8xTis5tEZKgesfbQMKRCyzvffRRUX+lK/y+DvsMFdabAicPkkZV6gO+FEWi9RfuKKoxxPwNL+dFF0SMImc1g== +micromark-util-character@^1.1.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/micromark-util-character/-/micromark-util-character-1.2.0.tgz#4fedaa3646db249bc58caeb000eb3549a8ca5dcc" + integrity sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg== dependencies: micromark-util-symbol "^1.0.0" + micromark-util-types "^1.0.0" -micromark-util-classify-character@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.0.0.tgz" - integrity sha512-F8oW2KKrQRb3vS5ud5HIqBVkCqQi224Nm55o5wYLzY/9PwHGXC01tr3d7+TqHHz6zrKQ72Okwtvm/xQm6OVNZA== +micromark-util-character@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-character/-/micromark-util-character-2.1.0.tgz#31320ace16b4644316f6bf057531689c71e2aee1" + integrity sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ== dependencies: - micromark-util-character "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" -micromark-util-combine-extensions@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.0.0.tgz" - integrity sha512-J8H058vFBdo/6+AsjHp2NF7AJ02SZtWaVUjsayNFeAiydTxUwViQPxN0Hf8dp4FmCQi0UUFovFsEyRSUmFH3MA== +micromark-util-chunked@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-util-chunked/-/micromark-util-chunked-2.0.0.tgz#e51f4db85fb203a79dbfef23fd41b2f03dc2ef89" + integrity sha512-anK8SWmNphkXdaKgz5hJvGa7l00qmcaUQoMYsBwDlSKFKjc6gjGXPDw3FNL3Nbwq5L8gE+RCbGqTw49FK5Qyvg== dependencies: - micromark-util-chunked "^1.0.0" - micromark-util-types "^1.0.0" + micromark-util-symbol "^2.0.0" -micromark-util-decode-numeric-character-reference@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.0.0.tgz" - integrity sha512-OzO9AI5VUtrTD7KSdagf4MWgHMtET17Ua1fIpXTpuhclCqD8egFWo85GxSGvxgkGS74bEahvtM0WP0HjvV0e4w== +micromark-util-classify-character@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-util-classify-character/-/micromark-util-classify-character-2.0.0.tgz#8c7537c20d0750b12df31f86e976d1d951165f34" + integrity sha512-S0ze2R9GH+fu41FA7pbSqNWObo/kzwf8rN/+IGlW/4tC6oACOs8B++bh+i9bVyNnwCcuksbFwsBme5OCKXCwIw== dependencies: - micromark-util-symbol "^1.0.0" + micromark-util-character "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" -micromark-util-decode-string@^1.0.0: - version 
"1.0.2" - resolved "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-1.0.2.tgz" - integrity sha512-DLT5Ho02qr6QWVNYbRZ3RYOSSWWFuH3tJexd3dgN1odEuPNxCngTCXJum7+ViRAd9BbdxCvMToPOD/IvVhzG6Q== +micromark-util-combine-extensions@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.0.tgz#75d6ab65c58b7403616db8d6b31315013bfb7ee5" + integrity sha512-vZZio48k7ON0fVS3CUgFatWHoKbbLTK/rT7pzpJ4Bjp5JjkZeasRfrS9wsBdDJK2cJLHMckXZdzPSSr1B8a4oQ== + dependencies: + micromark-util-chunked "^2.0.0" + micromark-util-types "^2.0.0" + +micromark-util-decode-numeric-character-reference@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.1.tgz#2698bbb38f2a9ba6310e359f99fcb2b35a0d2bd5" + integrity sha512-bmkNc7z8Wn6kgjZmVHOX3SowGmVdhYS7yBpMnuMnPzDq/6xwVA604DuOXMZTO1lvq01g+Adfa0pE2UKGlxL1XQ== + dependencies: + micromark-util-symbol "^2.0.0" + +micromark-util-decode-string@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-util-decode-string/-/micromark-util-decode-string-2.0.0.tgz#7dfa3a63c45aecaa17824e656bcdb01f9737154a" + integrity sha512-r4Sc6leeUTn3P6gk20aFMj2ntPwn6qpDZqWvYmAG6NgvFTIlj4WtrAudLi65qYoaGdXYViXYw2pkmn7QnIFasA== dependencies: decode-named-character-reference "^1.0.0" - micromark-util-character "^1.0.0" - micromark-util-decode-numeric-character-reference "^1.0.0" - micromark-util-symbol "^1.0.0" + micromark-util-character "^2.0.0" + micromark-util-decode-numeric-character-reference "^2.0.0" + micromark-util-symbol "^2.0.0" -micromark-util-encode@^1.0.0: - version "1.0.1" - resolved "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.0.1.tgz" - integrity sha512-U2s5YdnAYexjKDel31SVMPbfi+eF8y1U4pfiRW/Y8EFVCy/vgxk/2wWTxzcqE71LHtCuCzlBDRU2a5CQ5j+mQA== +micromark-util-encode@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-util-encode/-/micromark-util-encode-2.0.0.tgz#0921ac7953dc3f1fd281e3d1932decfdb9382ab1" + integrity sha512-pS+ROfCXAGLWCOc8egcBvT0kf27GoWMqtdarNfDcjb6YLuV5cM3ioG45Ys2qOVqeqSbjaKg72vU+Wby3eddPsA== -micromark-util-events-to-acorn@^1.0.0: - version "1.2.1" - resolved "https://registry.npmjs.org/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-1.2.1.tgz" - integrity sha512-mkg3BaWlw6ZTkQORrKVBW4o9ICXPxLtGz51vml5mQpKFdo9vqIX68CAx5JhTOdjQyAHH7JFmm4rh8toSPQZUmg== +micromark-util-events-to-acorn@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-2.0.2.tgz#4275834f5453c088bd29cd72dfbf80e3327cec07" + integrity sha512-Fk+xmBrOv9QZnEDguL9OI9/NQQp6Hz4FuQ4YmCb/5V7+9eAh1s6AYSvL20kHkD67YIg7EpE54TiSlcsf3vyZgA== dependencies: "@types/acorn" "^4.0.0" "@types/estree" "^1.0.0" - estree-util-visit "^1.0.0" - micromark-util-types "^1.0.0" - uvu "^0.5.0" - vfile-location "^4.0.0" - vfile-message "^3.0.0" - -micromark-util-html-tag-name@^1.0.0: - version "1.1.0" - resolved "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.1.0.tgz" - integrity sha512-BKlClMmYROy9UiV03SwNmckkjn8QHVaWkqoAqzivabvdGcwNGMMMH/5szAnywmsTBUzDsU57/mFi0sp4BQO6dA== + "@types/unist" "^3.0.0" + devlop "^1.0.0" + estree-util-visit "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" + vfile-message "^4.0.0" + +micromark-util-html-tag-name@^2.0.0: + version "2.0.0" + 
resolved "https://registry.yarnpkg.com/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.0.tgz#ae34b01cbe063363847670284c6255bb12138ec4" + integrity sha512-xNn4Pqkj2puRhKdKTm8t1YHC/BAjx6CEwRFXntTaRf/x16aqka6ouVoutm+QdkISTlT7e2zU7U4ZdlDLJd2Mcw== -micromark-util-normalize-identifier@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.0.0.tgz" - integrity sha512-yg+zrL14bBTFrQ7n35CmByWUTFsgst5JhA4gJYoty4Dqzj4Z4Fr/DHekSS5aLfH9bdlfnSvKAWsAgJhIbogyBg== +micromark-util-normalize-identifier@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.0.tgz#91f9a4e65fe66cc80c53b35b0254ad67aa431d8b" + integrity sha512-2xhYT0sfo85FMrUPtHcPo2rrp1lwbDEEzpx7jiH2xXJLqBuy4H0GgXk5ToU8IEwoROtXuL8ND0ttVa4rNqYK3w== dependencies: - micromark-util-symbol "^1.0.0" + micromark-util-symbol "^2.0.0" -micromark-util-resolve-all@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.0.0.tgz" - integrity sha512-CB/AGk98u50k42kvgaMM94wzBqozSzDDaonKU7P7jwQIuH2RU0TeBqGYJz2WY1UdihhjweivStrJ2JdkdEmcfw== +micromark-util-resolve-all@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.0.tgz#189656e7e1a53d0c86a38a652b284a252389f364" + integrity sha512-6KU6qO7DZ7GJkaCgwBNtplXCvGkJToU86ybBAUdavvgsCiG8lSSvYxr9MhwmQ+udpzywHsl4RpGJsYWG1pDOcA== dependencies: - micromark-util-types "^1.0.0" + micromark-util-types "^2.0.0" -micromark-util-sanitize-uri@^1.0.0, micromark-util-sanitize-uri@^1.1.0: - version "1.1.0" - resolved "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.1.0.tgz" - integrity sha512-RoxtuSCX6sUNtxhbmsEFQfWzs8VN7cTctmBPvYivo98xb/kDEoTCtJQX5wyzIYEmk/lvNFTat4hL8oW0KndFpg== +micromark-util-sanitize-uri@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.0.tgz#ec8fbf0258e9e6d8f13d9e4770f9be64342673de" + integrity sha512-WhYv5UEcZrbAtlsnPuChHUAsu/iBPOVaEVsntLBIdpibO0ddy8OzavZz3iL2xVvBZOpolujSliP65Kq0/7KIYw== dependencies: - micromark-util-character "^1.0.0" - micromark-util-encode "^1.0.0" - micromark-util-symbol "^1.0.0" + micromark-util-character "^2.0.0" + micromark-util-encode "^2.0.0" + micromark-util-symbol "^2.0.0" -micromark-util-subtokenize@^1.0.0: - version "1.0.2" - resolved "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.0.2.tgz" - integrity sha512-d90uqCnXp/cy4G881Ub4psE57Sf8YD0pim9QdjCRNjfas2M1u6Lbt+XZK9gnHL2XFhnozZiEdCa9CNfXSfQ6xA== +micromark-util-subtokenize@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/micromark-util-subtokenize/-/micromark-util-subtokenize-2.0.1.tgz#76129c49ac65da6e479c09d0ec4b5f29ec6eace5" + integrity sha512-jZNtiFl/1aY73yS3UGQkutD0UbhTt68qnRpw2Pifmz5wV9h8gOVsN70v+Lq/f1rKaU/W8pxRe8y8Q9FX1AOe1Q== dependencies: - micromark-util-chunked "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.0" - uvu "^0.5.0" + devlop "^1.0.0" + micromark-util-chunked "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" micromark-util-symbol@^1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.0.1.tgz" integrity sha512-oKDEMK2u5qqAptasDAwWDXq0tG9AssVwAx3E9bBF3t/shRIGsWIRG+cGafs2p/SnDSOecnt6hZPCE2o6lHfFmQ== 
-micromark-util-types@^1.0.0, micromark-util-types@^1.0.1: +micromark-util-symbol@^1.0.1: + version "1.1.0" + resolved "https://registry.yarnpkg.com/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz#813cd17837bdb912d069a12ebe3a44b6f7063142" + integrity sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag== + +micromark-util-symbol@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz#12225c8f95edf8b17254e47080ce0862d5db8044" + integrity sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw== + +micromark-util-types@^1.0.0: version "1.0.2" resolved "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.0.2.tgz" integrity sha512-DCfg/T8fcrhrRKTPjRrw/5LLvdGV7BHySf/1LOZx7TzWZdYRjogNtyNq885z3nNallwr3QUKARjqvHqX1/7t+w== -micromark@^3.0.0: - version "3.1.0" - resolved "https://registry.npmjs.org/micromark/-/micromark-3.1.0.tgz" - integrity sha512-6Mj0yHLdUZjHnOPgr5xfWIMqMWS12zDN6iws9SLuSz76W8jTtAv24MN4/CL7gJrl5vtxGInkkqDv/JIoRsQOvA== +micromark-util-types@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/micromark-util-types/-/micromark-util-types-2.0.0.tgz#63b4b7ffeb35d3ecf50d1ca20e68fc7caa36d95e" + integrity sha512-oNh6S2WMHWRZrmutsRmDDfkzKtxF+bc2VxLC9dvtrDIRFln627VsFP6fLMgTryGDljgLPjkrzQSDcPrjPyDJ5w== + +micromark@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/micromark/-/micromark-4.0.0.tgz#84746a249ebd904d9658cfabc1e8e5f32cbc6249" + integrity sha512-o/sd0nMof8kYff+TqcDx3VSrgBTcZpSvYcAHIfHhv5VAuNmisCxjhx6YmxS8PFEpb9z5WKWKPdzf0jM23ro3RQ== dependencies: "@types/debug" "^4.0.0" debug "^4.0.0" decode-named-character-reference "^1.0.0" - micromark-core-commonmark "^1.0.1" - micromark-factory-space "^1.0.0" - micromark-util-character "^1.0.0" - micromark-util-chunked "^1.0.0" - micromark-util-combine-extensions "^1.0.0" - micromark-util-decode-numeric-character-reference "^1.0.0" - micromark-util-encode "^1.0.0" - micromark-util-normalize-identifier "^1.0.0" - micromark-util-resolve-all "^1.0.0" - micromark-util-sanitize-uri "^1.0.0" - micromark-util-subtokenize "^1.0.0" - micromark-util-symbol "^1.0.0" - micromark-util-types "^1.0.1" - uvu "^0.5.0" + devlop "^1.0.0" + micromark-core-commonmark "^2.0.0" + micromark-factory-space "^2.0.0" + micromark-util-character "^2.0.0" + micromark-util-chunked "^2.0.0" + micromark-util-combine-extensions "^2.0.0" + micromark-util-decode-numeric-character-reference "^2.0.0" + micromark-util-encode "^2.0.0" + micromark-util-normalize-identifier "^2.0.0" + micromark-util-resolve-all "^2.0.0" + micromark-util-sanitize-uri "^2.0.0" + micromark-util-subtokenize "^2.0.0" + micromark-util-symbol "^2.0.0" + micromark-util-types "^2.0.0" micromatch@^4.0.2, micromatch@^4.0.4, micromatch@^4.0.5: version "4.0.8" @@ -6944,11 +7323,6 @@ mimic-fn@^4.0.0: resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-4.0.0.tgz#60a90550d5cb0b239cca65d893b1a53b29871ecc" integrity sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw== -mimic-response@^1.0.0, mimic-response@^1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz" - integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== - mimic-response@^3.1.0: version "3.1.0" resolved 
"https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" @@ -6959,20 +7333,13 @@ mimic-response@^4.0.0: resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-4.0.0.tgz#35468b19e7c75d10f5165ea25e75a5ceea7cf70f" integrity sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg== -mini-create-react-context@^0.4.0: - version "0.4.1" - resolved "https://registry.npmjs.org/mini-create-react-context/-/mini-create-react-context-0.4.1.tgz" - integrity sha512-YWCYEmd5CQeHGSAKrYvXgmzzkrvssZcuuQDDeqkT+PziKGMgE+0MCCtcKbROzocGBG1meBLl2FotlRwf4gAzbQ== - dependencies: - "@babel/runtime" "^7.12.1" - tiny-warning "^1.0.3" - -mini-css-extract-plugin@^2.6.1: - version "2.6.1" - resolved "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-2.6.1.tgz" - integrity sha512-wd+SD57/K6DiV7jIR34P+s3uckTRuQvx0tKPcvjFlrEylk6P4mQ2KSWk1hblj1Kxaqok7LogKOieygXqBczNlg== +mini-css-extract-plugin@^2.9.1: + version "2.9.2" + resolved "https://registry.yarnpkg.com/mini-css-extract-plugin/-/mini-css-extract-plugin-2.9.2.tgz#966031b468917a5446f4c24a80854b2947503c5b" + integrity sha512-GJuACcS//jtq4kCtd5ii/M0SZf7OZRH+BxdqXZHaJfb8TJiVl+NgQRPwiYt2EuqeSkNydn/7vP+bcE27C5mb9w== dependencies: schema-utils "^4.0.0" + tapable "^2.2.1" minimalistic-assert@^1.0.0: version "1.0.1" @@ -6986,14 +7353,14 @@ minimatch@3.0.4, minimatch@^3.0.4: dependencies: brace-expansion "^1.1.7" -minimatch@^3.1.1: +minimatch@3.1.2, minimatch@^3.1.1: version "3.1.2" resolved "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz" integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== dependencies: brace-expansion "^1.1.7" -minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6: +minimist@^1.2.0, minimist@^1.2.6: version "1.2.6" resolved "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz" integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== @@ -7018,15 +7385,10 @@ modern-normalize@^1.1.0: resolved "https://registry.npmjs.org/modern-normalize/-/modern-normalize-1.1.0.tgz" integrity sha512-2lMlY1Yc1+CUy0gw4H95uNN7vjbpoED7NNRSBHE25nWfLBdmMzFCsPshlzbxHz+gYMcBEUN8V4pU16prcdPSgA== -mri@^1.1.0: - version "1.2.0" - resolved "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz" - integrity sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA== - -mrmime@^1.0.0: - version "1.0.1" - resolved "https://registry.npmjs.org/mrmime/-/mrmime-1.0.1.tgz" - integrity sha512-hzzEagAgDyoU1Q6yg5uI+AorQgdvMCur3FcKf7NhMKWsaYg+RnbTyHRa/9IlLF9rf455MOCtcqqrQQ83pPP7Uw== +mrmime@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/mrmime/-/mrmime-2.0.0.tgz#151082a6e06e59a9a39b46b3e14d5cfe92b3abb4" + integrity sha512-eu38+hdgojoyq63s+yTpN4XMBdt5l8HhMhc4VKLO9KM5caLIBvUm4thi7fFaxyTmCKeNnXZ5pAlBwCUnhA09uw== ms@2.0.0: version "2.0.0" @@ -7038,7 +7400,7 @@ ms@2.1.2: resolved "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz" integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== -ms@2.1.3: +ms@2.1.3, ms@^2.1.3: version "2.1.3" resolved "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz" integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA== @@ -7091,19 +7453,22 @@ node-addon-api@^6.1.0: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76" 
integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA== -node-emoji@^1.10.0, node-emoji@^1.11.0: +node-emoji@^1.11.0: version "1.11.0" resolved "https://registry.npmjs.org/node-emoji/-/node-emoji-1.11.0.tgz" integrity sha512-wo2DpQkQp7Sjm2A0cq+sN7EHKO6Sl0ctXeBdFZrL9T9+UywORbufTcTZxom8YqpLQt/FqNMUkOpkZrJVYSKD3A== dependencies: lodash "^4.17.21" -node-fetch@2.6.7: - version "2.6.7" - resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz" - integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== +node-emoji@^2.1.0: + version "2.1.3" + resolved "https://registry.yarnpkg.com/node-emoji/-/node-emoji-2.1.3.tgz#93cfabb5cc7c3653aa52f29d6ffb7927d8047c06" + integrity sha512-E2WEOVsgs7O16zsURJ/eH8BqhF029wGpEOnv7Urwdo2wmQanOACwJQh0devF9D9RhoZru0+9JXIS0dBXIAz+lA== dependencies: - whatwg-url "^5.0.0" + "@sindresorhus/is" "^4.6.0" + char-regex "^1.0.2" + emojilib "^2.4.0" + skin-tone "^2.0.0" node-forge@^1: version "1.3.1" @@ -7137,16 +7502,6 @@ normalize-range@^0.1.2: resolved "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz" integrity sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA== -normalize-url@^4.1.0: - version "4.5.1" - resolved "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz" - integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA== - -normalize-url@^6.0.1: - version "6.1.0" - resolved "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz" - integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== - normalize-url@^8.0.0: version "8.0.1" resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-8.0.1.tgz#9b7d96af9836577c58f5883e939365fa15623a4a" @@ -7183,7 +7538,15 @@ nth-check@^2.0.0, nth-check@^2.0.1: dependencies: boolbase "^1.0.0" -object-assign@^4.1.0, object-assign@^4.1.1: +null-loader@^4.0.1: + version "4.0.1" + resolved "https://registry.yarnpkg.com/null-loader/-/null-loader-4.0.1.tgz#8e63bd3a2dd3c64236a4679428632edd0a6dbc6a" + integrity sha512-pxqVbi4U6N26lq+LmgIbB5XATP0VdZKOG25DhHi8btMmJJefGArFyDg1yc4U3hWCJbMqSrw0qyrz1UQX+qYXqg== + dependencies: + loader-utils "^2.0.0" + schema-utils "^3.0.0" + +object-assign@^4.1.1: version "4.1.1" resolved "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz" integrity sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg== @@ -7280,17 +7643,12 @@ ora@^7.0.1: string-width "^6.1.0" strip-ansi "^7.1.0" -p-cancelable@^1.0.0: - version "1.1.0" - resolved "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz" - integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== - p-cancelable@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-3.0.0.tgz#63826694b54d61ca1c20ebcb6d3ecf5e14cd8050" integrity sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw== -p-limit@^2.0.0, p-limit@^2.2.0: +p-limit@^2.0.0: version "2.3.0" resolved "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz" integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== @@ -7304,6 +7662,13 @@ p-limit@^3.0.2: dependencies: yocto-queue "^0.1.0" +p-limit@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/p-limit/-/p-limit-4.0.0.tgz#914af6544ed32bfa54670b061cafcbd04984b644" + integrity sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ== + dependencies: + yocto-queue "^1.0.0" + p-locate@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz" @@ -7311,13 +7676,6 @@ p-locate@^3.0.0: dependencies: p-limit "^2.0.0" -p-locate@^4.1.0: - version "4.1.0" - resolved "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz" - integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== - dependencies: - p-limit "^2.2.0" - p-locate@^5.0.0: version "5.0.0" resolved "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz" @@ -7325,6 +7683,13 @@ p-locate@^5.0.0: dependencies: p-limit "^3.0.2" +p-locate@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-6.0.0.tgz#3da9a49d4934b901089dca3302fa65dc5a05c04f" + integrity sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw== + dependencies: + p-limit "^4.0.0" + p-map@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz" @@ -7345,16 +7710,6 @@ p-try@^2.0.0: resolved "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz" integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== -package-json@^6.3.0: - version "6.5.0" - resolved "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz" - integrity sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ== - dependencies: - got "^9.6.0" - registry-auth-token "^4.0.0" - registry-url "^5.0.0" - semver "^6.2.0" - package-json@^8.1.0: version "8.1.1" resolved "https://registry.yarnpkg.com/package-json/-/package-json-8.1.1.tgz#3e9948e43df40d1e8e78a85485f1070bf8f03dc8" @@ -7375,23 +7730,11 @@ param-case@^3.0.4: parent-module@^1.0.0: version "1.0.1" - resolved "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== dependencies: callsites "^3.0.0" -parse-entities@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/parse-entities/-/parse-entities-2.0.0.tgz" - integrity sha512-kkywGpCcRYhqQIchaWqZ875wzpS/bMKhz5HnN3p7wveJTkTtyAB/AlnS0f8DFSqYW1T82t6yEAkEcB+A1I3MbQ== - dependencies: - character-entities "^1.0.0" - character-entities-legacy "^1.0.0" - character-reference-invalid "^1.0.0" - is-alphanumerical "^1.0.0" - is-decimal "^1.0.0" - is-hexadecimal "^1.0.0" - parse-entities@^4.0.0: version "4.0.1" resolved "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.1.tgz" @@ -7406,7 +7749,7 @@ parse-entities@^4.0.0: is-decimal "^2.0.0" is-hexadecimal "^2.0.0" -parse-json@^5.0.0: +parse-json@^5.0.0, parse-json@^5.2.0: version "5.2.0" resolved "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz" integrity sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg== @@ -7464,6 +7807,11 @@ path-exists@^4.0.0: resolved "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz" integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== +path-exists@^5.0.0: + version "5.0.0" + resolved 
"https://registry.yarnpkg.com/path-exists/-/path-exists-5.0.0.tgz#a6aad9489200b21fab31e49cf09277e5116fb9e7" + integrity sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ== + path-is-absolute@^1.0.0: version "1.0.1" resolved "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz" @@ -7486,7 +7834,7 @@ path-key@^4.0.0: path-parse@^1.0.7: version "1.0.7" - resolved "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz" + resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" integrity sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw== path-to-regexp@0.1.10: @@ -7494,10 +7842,10 @@ path-to-regexp@0.1.10: resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.10.tgz#67e9108c5c0551b9e5326064387de4763c4d5f8b" integrity sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w== -path-to-regexp@2.2.1: - version "2.2.1" - resolved "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-2.2.1.tgz" - integrity sha512-gu9bD6Ta5bwGrrU8muHzVOBFFREpp2iRkVfhBJahwJ6p6Xw20SjT0MxLnwkjOibQmGSYhiUnf2FLe7k+jcFmGQ== +path-to-regexp@3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-3.3.0.tgz#f7f31d32e8518c2660862b644414b6d5c63a611b" + integrity sha512-qyCH421YQPS2WFDxDjftfc1ZR5WKQzVzqsp4n9M2kQhVOo/ByahFoUNJfl58kOcEGfQ//7weFTDhm+ss8Ecxgw== path-to-regexp@^1.7.0: version "1.8.0" @@ -7508,18 +7856,9 @@ path-to-regexp@^1.7.0: path-type@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b" integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw== -periscopic@^3.0.0: - version "3.1.0" - resolved "https://registry.npmjs.org/periscopic/-/periscopic-3.1.0.tgz" - integrity sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw== - dependencies: - "@types/estree" "^1.0.0" - estree-walker "^3.0.0" - is-reference "^3.0.0" - picocolors@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz" @@ -7530,17 +7869,22 @@ picocolors@^1.0.1: resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.1.tgz#a8ad579b571952f0e5d25892de5445bcfe25aaa1" integrity sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew== -picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.3.1: +picocolors@^1.1.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.1.tgz#3d321af3eab939b083c8f929a1d12cda81c26b6b" + integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA== + +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3, picomatch@^2.3.1: version "2.3.1" resolved "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz" integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA== -pkg-dir@^4.1.0: - version "4.2.0" - resolved "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz" - integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== +pkg-dir@^7.0.0: + version "7.0.0" + resolved 
"https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-7.0.0.tgz#8f0c08d6df4476756c5ff29b3282d0bab7517d11" + integrity sha512-Ie9z/WINcxxLp27BKOCHGde4ITq9UklYKDzVo1nhk5sqGEXU3FpkwP5GM2voTGJkGd9B3Otl+Q4uwSOeSUtOBA== dependencies: - find-up "^4.0.0" + find-up "^6.3.0" pkg-up@^3.1.0: version "3.1.0" @@ -7563,58 +7907,58 @@ playwright@1.45.1: optionalDependencies: fsevents "2.3.2" -postcss-calc@^8.2.3: - version "8.2.4" - resolved "https://registry.npmjs.org/postcss-calc/-/postcss-calc-8.2.4.tgz" - integrity sha512-SmWMSJmB8MRnnULldx0lQIyhSNvuDl9HfrZkaqqE/WHAhToYsAvDq+yAsA/kIyINDszOp3Rh0GFoNuH5Ypsm3Q== +postcss-calc@^9.0.1: + version "9.0.1" + resolved "https://registry.yarnpkg.com/postcss-calc/-/postcss-calc-9.0.1.tgz#a744fd592438a93d6de0f1434c572670361eb6c6" + integrity sha512-TipgjGyzP5QzEhsOZUaIkeO5mKeMFpebWzRogWG/ysonUlnHcq5aJe0jOjpfzUU8PeSaBQnrE8ehR0QA5vs8PQ== dependencies: - postcss-selector-parser "^6.0.9" + postcss-selector-parser "^6.0.11" postcss-value-parser "^4.2.0" -postcss-colormin@^5.3.0: - version "5.3.0" - resolved "https://registry.npmjs.org/postcss-colormin/-/postcss-colormin-5.3.0.tgz" - integrity sha512-WdDO4gOFG2Z8n4P8TWBpshnL3JpmNmJwdnfP2gbk2qBA8PWwOYcmjmI/t3CmMeL72a7Hkd+x/Mg9O2/0rD54Pg== +postcss-colormin@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/postcss-colormin/-/postcss-colormin-6.1.0.tgz#076e8d3fb291fbff7b10e6b063be9da42ff6488d" + integrity sha512-x9yX7DOxeMAR+BgGVnNSAxmAj98NX/YxEMNFP+SDCEeNLb2r3i6Hh1ksMsnW8Ub5SLCpbescQqn9YEbE9554Sw== dependencies: - browserslist "^4.16.6" + browserslist "^4.23.0" caniuse-api "^3.0.0" - colord "^2.9.1" + colord "^2.9.3" postcss-value-parser "^4.2.0" -postcss-convert-values@^5.1.2: - version "5.1.2" - resolved "https://registry.npmjs.org/postcss-convert-values/-/postcss-convert-values-5.1.2.tgz" - integrity sha512-c6Hzc4GAv95B7suy4udszX9Zy4ETyMCgFPUDtWjdFTKH1SE9eFY/jEpHSwTH1QPuwxHpWslhckUQWbNRM4ho5g== +postcss-convert-values@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/postcss-convert-values/-/postcss-convert-values-6.1.0.tgz#3498387f8efedb817cbc63901d45bd1ceaa40f48" + integrity sha512-zx8IwP/ts9WvUM6NkVSkiU902QZL1bwPhaVaLynPtCsOTqp+ZKbNi+s6XJg3rfqpKGA/oc7Oxk5t8pOQJcwl/w== dependencies: - browserslist "^4.20.3" + browserslist "^4.23.0" postcss-value-parser "^4.2.0" -postcss-discard-comments@^5.1.2: - version "5.1.2" - resolved "https://registry.npmjs.org/postcss-discard-comments/-/postcss-discard-comments-5.1.2.tgz" - integrity sha512-+L8208OVbHVF2UQf1iDmRcbdjJkuBF6IS29yBDSiWUIzpYaAhtNl6JYnYm12FnkeCwQqF5LeklOu6rAqgfBZqQ== +postcss-discard-comments@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-discard-comments/-/postcss-discard-comments-6.0.2.tgz#e768dcfdc33e0216380623652b0a4f69f4678b6c" + integrity sha512-65w/uIqhSBBfQmYnG92FO1mWZjJ4GL5b8atm5Yw2UgrwD7HiNiSSNwJor1eCFGzUgYnN/iIknhNRVqjrrpuglw== -postcss-discard-duplicates@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-discard-duplicates/-/postcss-discard-duplicates-5.1.0.tgz" - integrity sha512-zmX3IoSI2aoenxHV6C7plngHWWhUOV3sP1T8y2ifzxzbtnuhk1EdPwm0S1bIUNaJ2eNbWeGLEwzw8huPD67aQw== +postcss-discard-duplicates@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/postcss-discard-duplicates/-/postcss-discard-duplicates-6.0.3.tgz#d121e893c38dc58a67277f75bb58ba43fce4c3eb" + integrity sha512-+JA0DCvc5XvFAxwx6f/e68gQu/7Z9ud584VLmcgto28eB8FqSFZwtrLwB5Kcp70eIoWP/HXqz4wpo8rD8gpsTw== -postcss-discard-empty@^5.1.1: - version "5.1.1" - resolved 
"https://registry.npmjs.org/postcss-discard-empty/-/postcss-discard-empty-5.1.1.tgz" - integrity sha512-zPz4WljiSuLWsI0ir4Mcnr4qQQ5e1Ukc3i7UfE2XcrwKK2LIPIqE5jxMRxO6GbI3cv//ztXDsXwEWT3BHOGh3A== +postcss-discard-empty@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/postcss-discard-empty/-/postcss-discard-empty-6.0.3.tgz#ee39c327219bb70473a066f772621f81435a79d9" + integrity sha512-znyno9cHKQsK6PtxL5D19Fj9uwSzC2mB74cpT66fhgOadEUPyXFkbgwm5tvc3bt3NAy8ltE5MrghxovZRVnOjQ== -postcss-discard-overridden@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-discard-overridden/-/postcss-discard-overridden-5.1.0.tgz" - integrity sha512-21nOL7RqWR1kasIVdKs8HNqQJhFxLsyRfAnUDm4Fe4t4mCWL9OJiHvlHPjcd8zc5Myu89b/7wZDnOSjFgeWRtw== +postcss-discard-overridden@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-discard-overridden/-/postcss-discard-overridden-6.0.2.tgz#4e9f9c62ecd2df46e8fdb44dc17e189776572e2d" + integrity sha512-j87xzI4LUggC5zND7KdjsI25APtyMuynXZSujByMaav2roV6OZX+8AaCUcZSWqckZpjAjRyFDdpqybgjFO0HJQ== -postcss-discard-unused@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-discard-unused/-/postcss-discard-unused-5.1.0.tgz" - integrity sha512-KwLWymI9hbwXmJa0dkrzpRbSJEh0vVUd7r8t0yOGPcfKzyJJxFM8kLyC5Ev9avji6nY95pOp1W6HqIrfT+0VGw== +postcss-discard-unused@^6.0.5: + version "6.0.5" + resolved "https://registry.yarnpkg.com/postcss-discard-unused/-/postcss-discard-unused-6.0.5.tgz#c1b0e8c032c6054c3fbd22aaddba5b248136f338" + integrity sha512-wHalBlRHkaNnNwfC8z+ppX57VhvS+HWgjW508esjdaEYr3Mx7Gnn2xA4R/CKf5+Z9S5qsqC+Uzh4ueENWwCVUA== dependencies: - postcss-selector-parser "^6.0.5" + postcss-selector-parser "^6.0.16" postcss-js@^3.0.3: version "3.0.3" @@ -7632,91 +7976,91 @@ postcss-load-config@^3.1.0: lilconfig "^2.0.5" yaml "^1.10.2" -postcss-loader@^7.0.0: - version "7.0.1" - resolved "https://registry.npmjs.org/postcss-loader/-/postcss-loader-7.0.1.tgz" - integrity sha512-VRviFEyYlLjctSM93gAZtcJJ/iSkPZ79zWbN/1fSH+NisBByEiVLqpdVDrPLVSi8DX0oJo12kL/GppTBdKVXiQ== +postcss-loader@^7.3.3: + version "7.3.4" + resolved "https://registry.yarnpkg.com/postcss-loader/-/postcss-loader-7.3.4.tgz#aed9b79ce4ed7e9e89e56199d25ad1ec8f606209" + integrity sha512-iW5WTTBSC5BfsBJ9daFMPVrLT36MrNiC6fqOZTTaHjBNX6Pfd5p+hSBqe/fEeNd7pc13QiAyGt7VdGMw4eRC4A== dependencies: - cosmiconfig "^7.0.0" - klona "^2.0.5" - semver "^7.3.7" + cosmiconfig "^8.3.5" + jiti "^1.20.0" + semver "^7.5.4" -postcss-merge-idents@^5.1.1: - version "5.1.1" - resolved "https://registry.npmjs.org/postcss-merge-idents/-/postcss-merge-idents-5.1.1.tgz" - integrity sha512-pCijL1TREiCoog5nQp7wUe+TUonA2tC2sQ54UGeMmryK3UFGIYKqDyjnqd6RcuI4znFn9hWSLNN8xKE/vWcUQw== +postcss-merge-idents@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/postcss-merge-idents/-/postcss-merge-idents-6.0.3.tgz#7b9c31c7bc823c94bec50f297f04e3c2b838ea65" + integrity sha512-1oIoAsODUs6IHQZkLQGO15uGEbK3EAl5wi9SS8hs45VgsxQfMnxvt+L+zIr7ifZFIH14cfAeVe2uCTa+SPRa3g== dependencies: - cssnano-utils "^3.1.0" + cssnano-utils "^4.0.2" postcss-value-parser "^4.2.0" -postcss-merge-longhand@^5.1.6: - version "5.1.6" - resolved "https://registry.npmjs.org/postcss-merge-longhand/-/postcss-merge-longhand-5.1.6.tgz" - integrity sha512-6C/UGF/3T5OE2CEbOuX7iNO63dnvqhGZeUnKkDeifebY0XqkkvrctYSZurpNE902LDf2yKwwPFgotnfSoPhQiw== +postcss-merge-longhand@^6.0.5: + version "6.0.5" + resolved "https://registry.yarnpkg.com/postcss-merge-longhand/-/postcss-merge-longhand-6.0.5.tgz#ba8a8d473617c34a36abbea8dda2b215750a065a" + 
integrity sha512-5LOiordeTfi64QhICp07nzzuTDjNSO8g5Ksdibt44d+uvIIAE1oZdRn8y/W5ZtYgRH/lnLDlvi9F8btZcVzu3w== dependencies: postcss-value-parser "^4.2.0" - stylehacks "^5.1.0" + stylehacks "^6.1.1" -postcss-merge-rules@^5.1.2: - version "5.1.2" - resolved "https://registry.npmjs.org/postcss-merge-rules/-/postcss-merge-rules-5.1.2.tgz" - integrity sha512-zKMUlnw+zYCWoPN6yhPjtcEdlJaMUZ0WyVcxTAmw3lkkN/NDMRkOkiuctQEoWAOvH7twaxUUdvBWl0d4+hifRQ== +postcss-merge-rules@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/postcss-merge-rules/-/postcss-merge-rules-6.1.1.tgz#7aa539dceddab56019469c0edd7d22b64c3dea9d" + integrity sha512-KOdWF0gju31AQPZiD+2Ar9Qjowz1LTChSjFFbS+e2sFgc4uHOp3ZvVX4sNeTlk0w2O31ecFGgrFzhO0RSWbWwQ== dependencies: - browserslist "^4.16.6" + browserslist "^4.23.0" caniuse-api "^3.0.0" - cssnano-utils "^3.1.0" - postcss-selector-parser "^6.0.5" + cssnano-utils "^4.0.2" + postcss-selector-parser "^6.0.16" -postcss-minify-font-values@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-minify-font-values/-/postcss-minify-font-values-5.1.0.tgz" - integrity sha512-el3mYTgx13ZAPPirSVsHqFzl+BBBDrXvbySvPGFnQcTI4iNslrPaFq4muTkLZmKlGk4gyFAYUBMH30+HurREyA== +postcss-minify-font-values@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/postcss-minify-font-values/-/postcss-minify-font-values-6.1.0.tgz#a0e574c02ee3f299be2846369211f3b957ea4c59" + integrity sha512-gklfI/n+9rTh8nYaSJXlCo3nOKqMNkxuGpTn/Qm0gstL3ywTr9/WRKznE+oy6fvfolH6dF+QM4nCo8yPLdvGJg== dependencies: postcss-value-parser "^4.2.0" -postcss-minify-gradients@^5.1.1: - version "5.1.1" - resolved "https://registry.npmjs.org/postcss-minify-gradients/-/postcss-minify-gradients-5.1.1.tgz" - integrity sha512-VGvXMTpCEo4qHTNSa9A0a3D+dxGFZCYwR6Jokk+/3oB6flu2/PnPXAh2x7x52EkY5xlIHLm+Le8tJxe/7TNhzw== +postcss-minify-gradients@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/postcss-minify-gradients/-/postcss-minify-gradients-6.0.3.tgz#ca3eb55a7bdb48a1e187a55c6377be918743dbd6" + integrity sha512-4KXAHrYlzF0Rr7uc4VrfwDJ2ajrtNEpNEuLxFgwkhFZ56/7gaE4Nr49nLsQDZyUe+ds+kEhf+YAUolJiYXF8+Q== dependencies: - colord "^2.9.1" - cssnano-utils "^3.1.0" + colord "^2.9.3" + cssnano-utils "^4.0.2" postcss-value-parser "^4.2.0" -postcss-minify-params@^5.1.3: - version "5.1.3" - resolved "https://registry.npmjs.org/postcss-minify-params/-/postcss-minify-params-5.1.3.tgz" - integrity sha512-bkzpWcjykkqIujNL+EVEPOlLYi/eZ050oImVtHU7b4lFS82jPnsCb44gvC6pxaNt38Els3jWYDHTjHKf0koTgg== +postcss-minify-params@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/postcss-minify-params/-/postcss-minify-params-6.1.0.tgz#54551dec77b9a45a29c3cb5953bf7325a399ba08" + integrity sha512-bmSKnDtyyE8ujHQK0RQJDIKhQ20Jq1LYiez54WiaOoBtcSuflfK3Nm596LvbtlFcpipMjgClQGyGr7GAs+H1uA== dependencies: - browserslist "^4.16.6" - cssnano-utils "^3.1.0" + browserslist "^4.23.0" + cssnano-utils "^4.0.2" postcss-value-parser "^4.2.0" -postcss-minify-selectors@^5.2.1: - version "5.2.1" - resolved "https://registry.npmjs.org/postcss-minify-selectors/-/postcss-minify-selectors-5.2.1.tgz" - integrity sha512-nPJu7OjZJTsVUmPdm2TcaiohIwxP+v8ha9NehQ2ye9szv4orirRU3SDdtUmKH+10nzn0bAyOXZ0UEr7OpvLehg== +postcss-minify-selectors@^6.0.4: + version "6.0.4" + resolved "https://registry.yarnpkg.com/postcss-minify-selectors/-/postcss-minify-selectors-6.0.4.tgz#197f7d72e6dd19eed47916d575d69dc38b396aff" + integrity sha512-L8dZSwNLgK7pjTto9PzWRoMbnLq5vsZSTu8+j1P/2GB8qdtGQfn+K1uSvFgYvgh83cbyxT5m43ZZhUMTJDSClQ== dependencies: - 
postcss-selector-parser "^6.0.5" + postcss-selector-parser "^6.0.16" -postcss-modules-extract-imports@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.0.0.tgz" - integrity sha512-bdHleFnP3kZ4NYDhuGlVK+CMrQ/pqUm8bx/oGL93K6gVwiclvX5x0n76fYMKuIGKzlABOy13zsvqjb0f92TEXw== +postcss-modules-extract-imports@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/postcss-modules-extract-imports/-/postcss-modules-extract-imports-3.1.0.tgz#b4497cb85a9c0c4b5aabeb759bb25e8d89f15002" + integrity sha512-k3kNe0aNFQDAZGbin48pL2VNidTF0w4/eASDsxlyspobzU3wZQLOGj7L9gfRe0Jo9/4uud09DsjFNH7winGv8Q== -postcss-modules-local-by-default@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.0.tgz" - integrity sha512-sT7ihtmGSF9yhm6ggikHdV0hlziDTX7oFoXtuVWeDd3hHObNkcHRo9V3yg7vCAY7cONyxJC/XXCmmiHHcvX7bQ== +postcss-modules-local-by-default@^4.0.5: + version "4.0.5" + resolved "https://registry.yarnpkg.com/postcss-modules-local-by-default/-/postcss-modules-local-by-default-4.0.5.tgz#f1b9bd757a8edf4d8556e8d0f4f894260e3df78f" + integrity sha512-6MieY7sIfTK0hYfafw1OMEG+2bg8Q1ocHCpoWLqOKj3JXlKu4G7btkmM/B7lFubYkYWmRSPLZi5chid63ZaZYw== dependencies: icss-utils "^5.0.0" postcss-selector-parser "^6.0.2" postcss-value-parser "^4.1.0" -postcss-modules-scope@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-3.0.0.tgz" - integrity sha512-hncihwFA2yPath8oZ15PZqvWGkWf+XUfQgUGamS4LqoP1anQLOsOJw0vr7J7IwLpoY9fatA2qiGUGmuZL0Iqlg== +postcss-modules-scope@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/postcss-modules-scope/-/postcss-modules-scope-3.2.0.tgz#a43d28289a169ce2c15c00c4e64c0858e43457d5" + integrity sha512-oq+g1ssrsZOsx9M96c5w8laRmvEu9C3adDSjI8oTcbfkrTE8hx/zfyobUoWIxaKPO8bt6S62kxpw5GqypEw1QQ== dependencies: postcss-selector-parser "^6.0.4" @@ -7734,100 +8078,107 @@ postcss-nested@5.0.6: dependencies: postcss-selector-parser "^6.0.6" -postcss-normalize-charset@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-normalize-charset/-/postcss-normalize-charset-5.1.0.tgz" - integrity sha512-mSgUJ+pd/ldRGVx26p2wz9dNZ7ji6Pn8VWBajMXFf8jk7vUoSrZ2lt/wZR7DtlZYKesmZI680qjr2CeFF2fbUg== +postcss-normalize-charset@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-charset/-/postcss-normalize-charset-6.0.2.tgz#1ec25c435057a8001dac942942a95ffe66f721e1" + integrity sha512-a8N9czmdnrjPHa3DeFlwqst5eaL5W8jYu3EBbTTkI5FHkfMhFZh1EGbku6jhHhIzTA6tquI2P42NtZ59M/H/kQ== -postcss-normalize-display-values@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-normalize-display-values/-/postcss-normalize-display-values-5.1.0.tgz" - integrity sha512-WP4KIM4o2dazQXWmFaqMmcvsKmhdINFblgSeRgn8BJ6vxaMyaJkwAzpPpuvSIoG/rmX3M+IrRZEz2H0glrQNEA== +postcss-normalize-display-values@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-display-values/-/postcss-normalize-display-values-6.0.2.tgz#54f02764fed0b288d5363cbb140d6950dbbdd535" + integrity sha512-8H04Mxsb82ON/aAkPeq8kcBbAtI5Q2a64X/mnRRfPXBq7XeogoQvReqxEfc0B4WPq1KimjezNC8flUtC3Qz6jg== dependencies: postcss-value-parser "^4.2.0" -postcss-normalize-positions@^5.1.1: - version "5.1.1" - resolved "https://registry.npmjs.org/postcss-normalize-positions/-/postcss-normalize-positions-5.1.1.tgz" - integrity 
sha512-6UpCb0G4eofTCQLFVuI3EVNZzBNPiIKcA1AKVka+31fTVySphr3VUgAIULBhxZkKgwLImhzMR2Bw1ORK+37INg== +postcss-normalize-positions@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-positions/-/postcss-normalize-positions-6.0.2.tgz#e982d284ec878b9b819796266f640852dbbb723a" + integrity sha512-/JFzI441OAB9O7VnLA+RtSNZvQ0NCFZDOtp6QPFo1iIyawyXg0YI3CYM9HBy1WvwCRHnPep/BvI1+dGPKoXx/Q== dependencies: postcss-value-parser "^4.2.0" -postcss-normalize-repeat-style@^5.1.1: - version "5.1.1" - resolved "https://registry.npmjs.org/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-5.1.1.tgz" - integrity sha512-mFpLspGWkQtBcWIRFLmewo8aC3ImN2i/J3v8YCFUwDnPu3Xz4rLohDO26lGjwNsQxB3YF0KKRwspGzE2JEuS0g== +postcss-normalize-repeat-style@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-repeat-style/-/postcss-normalize-repeat-style-6.0.2.tgz#f8006942fd0617c73f049dd8b6201c3a3040ecf3" + integrity sha512-YdCgsfHkJ2jEXwR4RR3Tm/iOxSfdRt7jplS6XRh9Js9PyCR/aka/FCb6TuHT2U8gQubbm/mPmF6L7FY9d79VwQ== dependencies: postcss-value-parser "^4.2.0" -postcss-normalize-string@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-normalize-string/-/postcss-normalize-string-5.1.0.tgz" - integrity sha512-oYiIJOf4T9T1N4i+abeIc7Vgm/xPCGih4bZz5Nm0/ARVJ7K6xrDlLwvwqOydvyL3RHNf8qZk6vo3aatiw/go3w== +postcss-normalize-string@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-string/-/postcss-normalize-string-6.0.2.tgz#e3cc6ad5c95581acd1fc8774b309dd7c06e5e363" + integrity sha512-vQZIivlxlfqqMp4L9PZsFE4YUkWniziKjQWUtsxUiVsSSPelQydwS8Wwcuw0+83ZjPWNTl02oxlIvXsmmG+CiQ== dependencies: postcss-value-parser "^4.2.0" -postcss-normalize-timing-functions@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-5.1.0.tgz" - integrity sha512-DOEkzJ4SAXv5xkHl0Wa9cZLF3WCBhF3o1SKVxKQAa+0pYKlueTpCgvkFAHfk+Y64ezX9+nITGrDZeVGgITJXjg== +postcss-normalize-timing-functions@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-timing-functions/-/postcss-normalize-timing-functions-6.0.2.tgz#40cb8726cef999de984527cbd9d1db1f3e9062c0" + integrity sha512-a+YrtMox4TBtId/AEwbA03VcJgtyW4dGBizPl7e88cTFULYsprgHWTbfyjSLyHeBcK/Q9JhXkt2ZXiwaVHoMzA== dependencies: postcss-value-parser "^4.2.0" -postcss-normalize-unicode@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-normalize-unicode/-/postcss-normalize-unicode-5.1.0.tgz" - integrity sha512-J6M3MizAAZ2dOdSjy2caayJLQT8E8K9XjLce8AUQMwOrCvjCHv24aLC/Lps1R1ylOfol5VIDMaM/Lo9NGlk1SQ== +postcss-normalize-unicode@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/postcss-normalize-unicode/-/postcss-normalize-unicode-6.1.0.tgz#aaf8bbd34c306e230777e80f7f12a4b7d27ce06e" + integrity sha512-QVC5TQHsVj33otj8/JD869Ndr5Xcc/+fwRh4HAsFsAeygQQXm+0PySrKbr/8tkDKzW+EVT3QkqZMfFrGiossDg== dependencies: - browserslist "^4.16.6" + browserslist "^4.23.0" postcss-value-parser "^4.2.0" -postcss-normalize-url@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-normalize-url/-/postcss-normalize-url-5.1.0.tgz" - integrity sha512-5upGeDO+PVthOxSmds43ZeMeZfKH+/DKgGRD7TElkkyS46JXAUhMzIKiCa7BabPeIy3AQcTkXwVVN7DbqsiCew== +postcss-normalize-url@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-url/-/postcss-normalize-url-6.0.2.tgz#292792386be51a8de9a454cb7b5c58ae22db0f79" + integrity 
sha512-kVNcWhCeKAzZ8B4pv/DnrU1wNh458zBNp8dh4y5hhxih5RZQ12QWMuQrDgPRw3LRl8mN9vOVfHl7uhvHYMoXsQ== dependencies: - normalize-url "^6.0.1" postcss-value-parser "^4.2.0" -postcss-normalize-whitespace@^5.1.1: - version "5.1.1" - resolved "https://registry.npmjs.org/postcss-normalize-whitespace/-/postcss-normalize-whitespace-5.1.1.tgz" - integrity sha512-83ZJ4t3NUDETIHTa3uEg6asWjSBYL5EdkVB0sDncx9ERzOKBVJIUeDO9RyA9Zwtig8El1d79HBp0JEi8wvGQnA== +postcss-normalize-whitespace@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-normalize-whitespace/-/postcss-normalize-whitespace-6.0.2.tgz#fbb009e6ebd312f8b2efb225c2fcc7cf32b400cd" + integrity sha512-sXZ2Nj1icbJOKmdjXVT9pnyHQKiSAyuNQHSgRCUgThn2388Y9cGVDR+E9J9iAYbSbLHI+UUwLVl1Wzco/zgv0Q== dependencies: postcss-value-parser "^4.2.0" -postcss-ordered-values@^5.1.3: - version "5.1.3" - resolved "https://registry.npmjs.org/postcss-ordered-values/-/postcss-ordered-values-5.1.3.tgz" - integrity sha512-9UO79VUhPwEkzbb3RNpqqghc6lcYej1aveQteWY+4POIwlqkYE21HKWaLDF6lWNuqCobEAyTovVhtI32Rbv2RQ== +postcss-ordered-values@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-ordered-values/-/postcss-ordered-values-6.0.2.tgz#366bb663919707093451ab70c3f99c05672aaae5" + integrity sha512-VRZSOB+JU32RsEAQrO94QPkClGPKJEL/Z9PCBImXMhIeK5KAYo6slP/hBYlLgrCjFxyqvn5VC81tycFEDBLG1Q== dependencies: - cssnano-utils "^3.1.0" + cssnano-utils "^4.0.2" postcss-value-parser "^4.2.0" -postcss-reduce-idents@^5.2.0: - version "5.2.0" - resolved "https://registry.npmjs.org/postcss-reduce-idents/-/postcss-reduce-idents-5.2.0.tgz" - integrity sha512-BTrLjICoSB6gxbc58D5mdBK8OhXRDqud/zodYfdSi52qvDHdMwk+9kB9xsM8yJThH/sZU5A6QVSmMmaN001gIg== +postcss-reduce-idents@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/postcss-reduce-idents/-/postcss-reduce-idents-6.0.3.tgz#b0d9c84316d2a547714ebab523ec7d13704cd486" + integrity sha512-G3yCqZDpsNPoQgbDUy3T0E6hqOQ5xigUtBQyrmq3tn2GxlyiL0yyl7H+T8ulQR6kOcHJ9t7/9H4/R2tv8tJbMA== dependencies: postcss-value-parser "^4.2.0" -postcss-reduce-initial@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-reduce-initial/-/postcss-reduce-initial-5.1.0.tgz" - integrity sha512-5OgTUviz0aeH6MtBjHfbr57tml13PuedK/Ecg8szzd4XRMbYxH4572JFG067z+FqBIf6Zp/d+0581glkvvWMFw== +postcss-reduce-initial@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/postcss-reduce-initial/-/postcss-reduce-initial-6.1.0.tgz#4401297d8e35cb6e92c8e9586963e267105586ba" + integrity sha512-RarLgBK/CrL1qZags04oKbVbrrVK2wcxhvta3GCxrZO4zveibqbRPmm2VI8sSgCXwoUHEliRSbOfpR0b/VIoiw== dependencies: - browserslist "^4.16.6" + browserslist "^4.23.0" caniuse-api "^3.0.0" -postcss-reduce-transforms@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-reduce-transforms/-/postcss-reduce-transforms-5.1.0.tgz" - integrity sha512-2fbdbmgir5AvpW9RLtdONx1QoYG2/EtqpNQbFASDlixBbAYuTcJ0dECwlqNqH7VbaUnEnh8SrxOe2sRIn24XyQ== +postcss-reduce-transforms@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-reduce-transforms/-/postcss-reduce-transforms-6.0.2.tgz#6fa2c586bdc091a7373caeee4be75a0f3e12965d" + integrity sha512-sB+Ya++3Xj1WaT9+5LOOdirAxP7dJZms3GRcYheSPi1PiTMigsxHAdkrbItHxwYHr4kt1zL7mmcHstgMYT+aiA== dependencies: postcss-value-parser "^4.2.0" -postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.5, postcss-selector-parser@^6.0.6, postcss-selector-parser@^6.0.9: +postcss-selector-parser@^6.0.11, postcss-selector-parser@^6.0.16: + version "6.1.2" + 
resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz#27ecb41fb0e3b6ba7a1ec84fff347f734c7929de" + integrity sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg== + dependencies: + cssesc "^3.0.0" + util-deprecate "^1.0.2" + +postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector-parser@^6.0.6: version "6.0.10" resolved "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz" integrity sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w== @@ -7835,27 +8186,27 @@ postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4, postcss-selector cssesc "^3.0.0" util-deprecate "^1.0.2" -postcss-sort-media-queries@^4.2.1: - version "4.2.1" - resolved "https://registry.npmjs.org/postcss-sort-media-queries/-/postcss-sort-media-queries-4.2.1.tgz" - integrity sha512-9VYekQalFZ3sdgcTjXMa0dDjsfBVHXlraYJEMiOJ/2iMmI2JGCMavP16z3kWOaRu8NSaJCTgVpB/IVpH5yT9YQ== +postcss-sort-media-queries@^5.2.0: + version "5.2.0" + resolved "https://registry.yarnpkg.com/postcss-sort-media-queries/-/postcss-sort-media-queries-5.2.0.tgz#4556b3f982ef27d3bac526b99b6c0d3359a6cf97" + integrity sha512-AZ5fDMLD8SldlAYlvi8NIqo0+Z8xnXU2ia0jxmuhxAU+Lqt9K+AlmLNJ/zWEnE9x+Zx3qL3+1K20ATgNOr3fAA== dependencies: - sort-css-media-queries "2.0.4" + sort-css-media-queries "2.2.0" -postcss-svgo@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-svgo/-/postcss-svgo-5.1.0.tgz" - integrity sha512-D75KsH1zm5ZrHyxPakAxJWtkyXew5qwS70v56exwvw542d9CRtTo78K0WeFxZB4G7JXKKMbEZtZayTGdIky/eA== +postcss-svgo@^6.0.3: + version "6.0.3" + resolved "https://registry.yarnpkg.com/postcss-svgo/-/postcss-svgo-6.0.3.tgz#1d6e180d6df1fa8a3b30b729aaa9161e94f04eaa" + integrity sha512-dlrahRmxP22bX6iKEjOM+c8/1p+81asjKT+V5lrgOH944ryx/OHpclnIbGsKVd3uWOXFLYJwCVf0eEkJGvO96g== dependencies: postcss-value-parser "^4.2.0" - svgo "^2.7.0" + svgo "^3.2.0" -postcss-unique-selectors@^5.1.1: - version "5.1.1" - resolved "https://registry.npmjs.org/postcss-unique-selectors/-/postcss-unique-selectors-5.1.1.tgz" - integrity sha512-5JiODlELrz8L2HwxfPnhOWZYWDxVHWL83ufOv84NrcgipI7TaeRsatAhK4Tr2/ZiYldpK/wBvw5BD3qfaK96GA== +postcss-unique-selectors@^6.0.4: + version "6.0.4" + resolved "https://registry.yarnpkg.com/postcss-unique-selectors/-/postcss-unique-selectors-6.0.4.tgz#983ab308896b4bf3f2baaf2336e14e52c11a2088" + integrity sha512-K38OCaIrO8+PzpArzkLKB42dSARtC2tmG6PvD4b1o1Q2E9Os8jzfWFfSy/rixsHwohtsDdFtAWGjFVFUdwYaMg== dependencies: - postcss-selector-parser "^6.0.5" + postcss-selector-parser "^6.0.16" postcss-value-parser@^3.3.0: version "3.3.1" @@ -7867,12 +8218,12 @@ postcss-value-parser@^4.1.0, postcss-value-parser@^4.2.0: resolved "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz" integrity sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ== -postcss-zindex@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/postcss-zindex/-/postcss-zindex-5.1.0.tgz" - integrity sha512-fgFMf0OtVSBR1va1JNHYgMxYk73yhn/qb4uQDq1DLGYolz8gHCyr/sesEuGUaYs58E3ZJRcpoGuPVoB7Meiq9A== +postcss-zindex@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/postcss-zindex/-/postcss-zindex-6.0.2.tgz#e498304b83a8b165755f53db40e2ea65a99b56e1" + integrity sha512-5BxW9l1evPB/4ZIc+2GobEBoKC+h8gPGCMi+jxsYvd2x0mjq7wazk6DrP71pStqxE9Foxh5TVnonbWpFZzXaYg== -postcss@^8.1.6, postcss@^8.3.11, postcss@^8.3.5, 
postcss@^8.4.13, postcss@^8.4.14, postcss@^8.4.7: +postcss@^8.1.6, postcss@^8.3.5: version "8.4.41" resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.41.tgz#d6104d3ba272d882fe18fc07d15dc2da62fa2681" integrity sha512-TesUflQ0WKZqAvg52PWL6kHgLKP6xB6heTOdoYM0Wt2UHyxNa4K25EZZMgKns3BH1RLVbZCREPpLY0rhnNoHVQ== @@ -7881,6 +8232,15 @@ postcss@^8.1.6, postcss@^8.3.11, postcss@^8.3.5, postcss@^8.4.13, postcss@^8.4.1 picocolors "^1.0.1" source-map-js "^1.2.0" +postcss@^8.4.21, postcss@^8.4.24, postcss@^8.4.26, postcss@^8.4.33, postcss@^8.4.38: + version "8.4.47" + resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.47.tgz#5bf6c9a010f3e724c503bf03ef7947dcb0fea365" + integrity sha512-56rxCq7G/XfB4EkXq9Egn5GCqugWvDFjafDOThIdMBsI15iqPqR5r15TfSr1YPYeEI19YeaXMCbY6u88Y76GLQ== + dependencies: + nanoid "^3.3.7" + picocolors "^1.1.0" + source-map-js "^1.2.1" + prebuild-install@^7.1.1: version "7.1.2" resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.2.tgz#a5fd9986f5a6251fbc47e1e5c65de71e68c0a056" @@ -7899,11 +8259,6 @@ prebuild-install@^7.1.1: tar-fs "^2.0.0" tunnel-agent "^0.6.0" -prepend-http@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz" - integrity sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA== - pretty-error@^4.0.0: version "4.0.0" resolved "https://registry.npmjs.org/pretty-error/-/pretty-error-4.0.0.tgz" @@ -7922,28 +8277,24 @@ pretty-time@^1.1.0: resolved "https://registry.npmjs.org/pretty-time/-/pretty-time-1.1.0.tgz" integrity sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA== -prism-react-renderer@^1.3.5: - version "1.3.5" - resolved "https://registry.npmjs.org/prism-react-renderer/-/prism-react-renderer-1.3.5.tgz" - integrity sha512-IJ+MSwBWKG+SM3b2SUfdrhC+gu01QkV2KmRQgREThBfSQRoufqRfxfHUxpG1WcaFjP+kojcFyO9Qqtpgt3qLCg== +prism-react-renderer@^2.3.0, prism-react-renderer@^2.4.0: + version "2.4.0" + resolved "https://registry.yarnpkg.com/prism-react-renderer/-/prism-react-renderer-2.4.0.tgz#c5ea692029c2f8b3fd04f63662d04ffd4eaf10a0" + integrity sha512-327BsVCD/unU4CNLZTWVHyUHKnsqcvj2qbPlQ8MiBE2eq2rgctjigPA1Gp9HLF83kZ20zNN6jgizHJeEsyFYOw== + dependencies: + "@types/prismjs" "^1.26.0" + clsx "^2.0.0" -prismjs@^1.28.0: - version "1.28.0" - resolved "https://registry.npmjs.org/prismjs/-/prismjs-1.28.0.tgz" - integrity sha512-8aaXdYvl1F7iC7Xm1spqSaY/OJBpYW3v+KJ+F17iYxvdc8sfjW194COK5wVhMZX45tGteiBQgdvD/nhxcRwylw== +prismjs@^1.29.0: + version "1.29.0" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.29.0.tgz#f113555a8fa9b57c35e637bba27509dcf802dd12" + integrity sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q== process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz" integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== -promise@^7.1.1: - version "7.3.1" - resolved "https://registry.npmjs.org/promise/-/promise-7.3.1.tgz" - integrity sha512-nolQXZ/4L+bP/UGlkfaIujX9BKxGwmQ9OT4mOt5yvy8iK1h3wqTEJCijzGANTCCl9nWjY41juyAn2K3Q1hLLTg== - dependencies: - asap "~2.0.3" - prompts@^2.4.2: version "2.4.2" resolved "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz" @@ -7952,7 +8303,7 @@ prompts@^2.4.2: kleur "^3.0.3" sisteransi "^1.0.5" -prop-types@^15.0.0, prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: +prop-types@^15.6.2, 
prop-types@^15.7.2, prop-types@^15.8.1: version "15.8.1" resolved "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz" integrity sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg== @@ -7961,7 +8312,7 @@ prop-types@^15.0.0, prop-types@^15.6.2, prop-types@^15.7.2, prop-types@^15.8.1: object-assign "^4.1.1" react-is "^16.13.1" -property-information@^5.0.0, property-information@^5.3.0: +property-information@^5.0.0: version "5.6.0" resolved "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz" integrity sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA== @@ -7999,23 +8350,11 @@ pump@^3.0.0: end-of-stream "^1.1.0" once "^1.3.1" -punycode@^1.3.2: - version "1.4.1" - resolved "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz" - integrity sha512-jmYNElW7yvO7TV33CjSmvSiE2yco3bV2czu/OzDKdMNVZQWfxCblURLhf+47syQRBntjfLdd/H0egrzIG+oaFQ== - punycode@^2.1.0: version "2.1.1" resolved "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== -pupa@^2.1.1: - version "2.1.1" - resolved "https://registry.npmjs.org/pupa/-/pupa-2.1.1.tgz" - integrity sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A== - dependencies: - escape-goat "^2.0.0" - pupa@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/pupa/-/pupa-3.1.0.tgz#f15610274376bbcc70c9a3aa8b505ea23f41c579" @@ -8023,11 +8362,6 @@ pupa@^3.1.0: dependencies: escape-goat "^4.0.0" -pure-color@^1.2.0: - version "1.3.0" - resolved "https://registry.npmjs.org/pure-color/-/pure-color-1.3.0.tgz" - integrity sha512-QFADYnsVoBMw1srW7OVKEYjG+MbIa49s54w1MA1EDY6r2r/sTcKKYqRX1f4GYvnXP7eN/Pe9HFcX+hwzmrXRHA== - purgecss@^4.0.3: version "4.1.3" resolved "https://registry.npmjs.org/purgecss/-/purgecss-4.1.3.tgz" @@ -8094,7 +8428,7 @@ raw-body@2.5.2: iconv-lite "0.4.24" unpipe "1.0.0" -rc@1.2.8, rc@^1.2.7, rc@^1.2.8: +rc@1.2.8, rc@^1.2.7: version "1.2.8" resolved "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz" integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw== @@ -8104,16 +8438,6 @@ rc@1.2.8, rc@^1.2.7, rc@^1.2.8: minimist "^1.2.0" strip-json-comments "~2.0.1" -react-base16-styling@^0.6.0: - version "0.6.0" - resolved "https://registry.npmjs.org/react-base16-styling/-/react-base16-styling-0.6.0.tgz" - integrity sha512-yvh/7CArceR/jNATXOKDlvTnPKPmGZz7zsenQ3jUwLzHkNUR0CvY3yGYJbWJ/nnxsL8Sgmt5cO3/SILVuPO6TQ== - dependencies: - base16 "^1.0.0" - lodash.curry "^4.0.1" - lodash.flow "^3.3.0" - pure-color "^1.2.0" - react-dev-utils@^12.0.1: version "12.0.1" resolved "https://registry.npmjs.org/react-dev-utils/-/react-dev-utils-12.0.1.tgz" @@ -8144,14 +8468,13 @@ react-dev-utils@^12.0.1: strip-ansi "^6.0.1" text-table "^0.2.0" -react-dom@^17.0.2: - version "17.0.2" - resolved "https://registry.npmjs.org/react-dom/-/react-dom-17.0.2.tgz" - integrity sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA== +react-dom@^18.3.1: + version "18.3.1" + resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.3.1.tgz#c2265d79511b57d479b3dd3fdfa51536494c5cb4" + integrity sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw== dependencies: loose-envify "^1.1.0" - object-assign "^4.1.1" - scheduler "^0.20.2" + scheduler "^0.23.2" react-error-overlay@^6.0.11: version 
"6.0.11" @@ -8186,25 +8509,15 @@ react-is@^16.13.1, react-is@^16.6.0, react-is@^16.7.0: resolved "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz" integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== -react-is@^18.0.0, react-is@^18.2.0: +react-is@^18.2.0: version "18.2.0" resolved "https://registry.npmjs.org/react-is/-/react-is-18.2.0.tgz" integrity sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w== -react-json-view@^1.21.3: - version "1.21.3" - resolved "https://registry.npmjs.org/react-json-view/-/react-json-view-1.21.3.tgz" - integrity sha512-13p8IREj9/x/Ye4WI/JpjhoIwuzEgUAtgJZNBJckfzJt1qyh24BdTm6UQNGnyTq9dapQdrqvquZTo3dz1X6Cjw== - dependencies: - flux "^4.0.1" - react-base16-styling "^0.6.0" - react-lifecycles-compat "^3.0.4" - react-textarea-autosize "^8.3.2" - -react-lifecycles-compat@^3.0.4: - version "3.0.4" - resolved "https://registry.npmjs.org/react-lifecycles-compat/-/react-lifecycles-compat-3.0.4.tgz" - integrity sha512-fBASbA6LnOU9dOU2eW7aQ8xmYBSXUIWr+UmF9b1efZBazGNO+rcXT/icdKnYm2pTwcRylVUYwW7H1PHfLekVzA== +react-json-view-lite@^1.2.0: + version "1.5.0" + resolved "https://registry.yarnpkg.com/react-json-view-lite/-/react-json-view-lite-1.5.0.tgz#377cc302821717ac79a1b6d099e1891df54c8662" + integrity sha512-nWqA1E4jKPklL2jvHWs6s+7Na0qNgw9HCP6xehdQJeg6nPBTFZgGwyko9Q0oj+jQWKTTVRS30u0toM5wiuL3iw== react-loadable-ssr-addon-v5-slorber@^1.0.1: version "1.0.1" @@ -8213,34 +8526,28 @@ react-loadable-ssr-addon-v5-slorber@^1.0.1: dependencies: "@babel/runtime" "^7.10.3" -"react-loadable@npm:@docusaurus/react-loadable@5.5.2": - version "5.5.2" - resolved "https://registry.npmjs.org/@docusaurus/react-loadable/-/react-loadable-5.5.2.tgz" - integrity sha512-A3dYjdBGuy0IGT+wyLIGIKLRE+sAk1iNk0f1HjNDysO7u8lhL4N3VEm+FAubmJbAztn94F7MxBTPmnixbiyFdQ== +"react-loadable@npm:@docusaurus/react-loadable@6.0.0": + version "6.0.0" + resolved "https://registry.yarnpkg.com/@docusaurus/react-loadable/-/react-loadable-6.0.0.tgz#de6c7f73c96542bd70786b8e522d535d69069dc4" + integrity sha512-YMMxTUQV/QFSnbgrP3tjDzLHRg7vsbMn8e9HAa8o/1iXoiomo48b7sk/kkmWEuWNDPJVlKSJRB6Y2fHqdJk+SQ== dependencies: "@types/react" "*" - prop-types "^15.6.2" -react-markdown@^8.0.5: - version "8.0.5" - resolved "https://registry.npmjs.org/react-markdown/-/react-markdown-8.0.5.tgz" - integrity sha512-jGJolWWmOWAvzf+xMdB9zwStViODyyFQhNB/bwCerbBKmrTmgmA599CGiOlP58OId1IMoIRsA8UdI1Lod4zb5A== - dependencies: - "@types/hast" "^2.0.0" - "@types/prop-types" "^15.0.0" - "@types/unist" "^2.0.0" - comma-separated-tokens "^2.0.0" - hast-util-whitespace "^2.0.0" - prop-types "^15.0.0" - property-information "^6.0.0" - react-is "^18.0.0" - remark-parse "^10.0.0" - remark-rehype "^10.0.0" - space-separated-tokens "^2.0.0" - style-to-object "^0.4.0" - unified "^10.0.0" - unist-util-visit "^4.0.0" - vfile "^5.0.0" +react-markdown@^9.0.1: + version "9.0.1" + resolved "https://registry.yarnpkg.com/react-markdown/-/react-markdown-9.0.1.tgz#c05ddbff67fd3b3f839f8c648e6fb35d022397d1" + integrity sha512-186Gw/vF1uRkydbsOIkcGXw7aHq0sZOCRFFjGrr7b9+nVZg4UfA4enXCaxm4fUzecU38sWfrNDitGhshuU7rdg== + dependencies: + "@types/hast" "^3.0.0" + devlop "^1.0.0" + hast-util-to-jsx-runtime "^2.0.0" + html-url-attributes "^3.0.0" + mdast-util-to-hast "^13.0.0" + remark-parse "^11.0.0" + remark-rehype "^11.0.0" + unified "^11.0.0" + unist-util-visit "^5.0.0" + vfile "^6.0.0" react-router-config@^5.1.1: version "5.1.1" @@ -8249,44 +8556,34 @@ react-router-config@^5.1.1: 
dependencies: "@babel/runtime" "^7.1.2" -react-router-dom@^5.3.3: - version "5.3.3" - resolved "https://registry.npmjs.org/react-router-dom/-/react-router-dom-5.3.3.tgz" - integrity sha512-Ov0tGPMBgqmbu5CDmN++tv2HQ9HlWDuWIIqn4b88gjlAN5IHI+4ZUZRcpz9Hl0azFIwihbLDYw1OiHGRo7ZIng== +react-router-dom@^5.3.4: + version "5.3.4" + resolved "https://registry.yarnpkg.com/react-router-dom/-/react-router-dom-5.3.4.tgz#2ed62ffd88cae6db134445f4a0c0ae8b91d2e5e6" + integrity sha512-m4EqFMHv/Ih4kpcBCONHbkT68KoAeHN4p3lAGoNryfHi0dMy0kCzEZakiKRsvg5wHZ/JLrLW8o8KomWiz/qbYQ== dependencies: "@babel/runtime" "^7.12.13" history "^4.9.0" loose-envify "^1.3.1" prop-types "^15.6.2" - react-router "5.3.3" + react-router "5.3.4" tiny-invariant "^1.0.2" tiny-warning "^1.0.0" -react-router@5.3.3, react-router@^5.3.3: - version "5.3.3" - resolved "https://registry.npmjs.org/react-router/-/react-router-5.3.3.tgz" - integrity sha512-mzQGUvS3bM84TnbtMYR8ZjKnuPJ71IjSzR+DE6UkUqvN4czWIqEs17yLL8xkAycv4ev0AiN+IGrWu88vJs/p2w== +react-router@5.3.4, react-router@^5.3.4: + version "5.3.4" + resolved "https://registry.yarnpkg.com/react-router/-/react-router-5.3.4.tgz#8ca252d70fcc37841e31473c7a151cf777887bb5" + integrity sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA== dependencies: "@babel/runtime" "^7.12.13" history "^4.9.0" hoist-non-react-statics "^3.1.0" loose-envify "^1.3.1" - mini-create-react-context "^0.4.0" path-to-regexp "^1.7.0" prop-types "^15.6.2" react-is "^16.6.0" tiny-invariant "^1.0.2" tiny-warning "^1.0.0" -react-textarea-autosize@^8.3.2: - version "8.3.4" - resolved "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.3.4.tgz" - integrity sha512-CdtmP8Dc19xL8/R6sWvtknD/eCXkQr30dtvC4VmGInhRsfF8X/ihXCq6+9l9qbxmKRiq407/7z5fxE7cVWQNgQ== - dependencies: - "@babel/runtime" "^7.10.2" - use-composed-ref "^1.3.0" - use-latest "^1.2.1" - react-transition-group@^4.4.5: version "4.4.5" resolved "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz" @@ -8297,13 +8594,12 @@ react-transition-group@^4.4.5: loose-envify "^1.4.0" prop-types "^15.6.2" -react@^17.0.2: - version "17.0.2" - resolved "https://registry.npmjs.org/react/-/react-17.0.2.tgz" - integrity sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA== +react@^18.3.1: + version "18.3.1" + resolved "https://registry.yarnpkg.com/react/-/react-18.3.1.tgz#49ab892009c53933625bd16b2533fc754cab2891" + integrity sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ== dependencies: loose-envify "^1.1.0" - object-assign "^4.1.1" readable-stream@^2.0.1: version "2.3.7" @@ -8353,7 +8649,47 @@ rechoir@^0.6.2: resolved "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz" integrity sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw== dependencies: - resolve "^1.1.6" + resolve "^1.1.6" + +recma-build-jsx@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/recma-build-jsx/-/recma-build-jsx-1.0.0.tgz#c02f29e047e103d2fab2054954e1761b8ea253c4" + integrity sha512-8GtdyqaBcDfva+GUKDr3nev3VpKAhup1+RvkMvUxURHpW7QyIvk9F5wz7Vzo06CEMSilw6uArgRqhpiUcWp8ew== + dependencies: + "@types/estree" "^1.0.0" + estree-util-build-jsx "^3.0.0" + vfile "^6.0.0" + +recma-jsx@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/recma-jsx/-/recma-jsx-1.0.0.tgz#f7bef02e571a49d6ba3efdfda8e2efab48dbe3aa" + integrity 
sha512-5vwkv65qWwYxg+Atz95acp8DMu1JDSqdGkA2Of1j6rCreyFUE/gp15fC8MnGEuG1W68UKjM6x6+YTWIh7hZM/Q== + dependencies: + acorn-jsx "^5.0.0" + estree-util-to-js "^2.0.0" + recma-parse "^1.0.0" + recma-stringify "^1.0.0" + unified "^11.0.0" + +recma-parse@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/recma-parse/-/recma-parse-1.0.0.tgz#c351e161bb0ab47d86b92a98a9d891f9b6814b52" + integrity sha512-OYLsIGBB5Y5wjnSnQW6t3Xg7q3fQ7FWbw/vcXtORTnyaSFscOtABg+7Pnz6YZ6c27fG1/aN8CjfwoUEUIdwqWQ== + dependencies: + "@types/estree" "^1.0.0" + esast-util-from-js "^2.0.0" + unified "^11.0.0" + vfile "^6.0.0" + +recma-stringify@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/recma-stringify/-/recma-stringify-1.0.0.tgz#54632030631e0c7546136ff9ef8fde8e7b44f130" + integrity sha512-cjwII1MdIIVloKvC9ErQ+OgAtwHBmcZ0Bg4ciz78FtbT8In39aAYbaA7zvxQ61xVMSPE8WxhLwLbhif4Js2C+g== + dependencies: + "@types/estree" "^1.0.0" + estree-util-to-js "^2.0.0" + unified "^11.0.0" + vfile "^6.0.0" recursive-readdir@^2.2.2: version "2.2.2" @@ -8377,25 +8713,27 @@ regenerate-unicode-properties@^10.0.1: dependencies: regenerate "^1.4.2" +regenerate-unicode-properties@^10.2.0: + version "10.2.0" + resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.0.tgz#626e39df8c372338ea9b8028d1f99dc3fd9c3db0" + integrity sha512-DqHn3DwbmmPVzeKj9woBadqmXxLvQoQIwu7nopMc72ztvxVmVk2SBhSnx67zuye5TP+lJsb/TBQsjLKhnDf3MA== + dependencies: + regenerate "^1.4.2" + regenerate@^1.4.2: version "1.4.2" resolved "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz" integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== -regenerator-runtime@^0.13.4: - version "0.13.9" - resolved "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz" - integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== - regenerator-runtime@^0.14.0: - version "0.14.0" - resolved "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz" - integrity sha512-srw17NI0TUWHuGa5CFGGmhfNIeja30WMBfbslPNhf6JrqQlLN5gcrvig1oqPxiVaXb0oW0XRKtH6Nngs5lKCIA== + version "0.14.1" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz#356ade10263f685dda125100cd862c1db895327f" + integrity sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw== -regenerator-transform@^0.15.0: - version "0.15.0" - resolved "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.0.tgz" - integrity sha512-LsrGtPmbYg19bcPHwdtmXwbW+TqNvtY4riE3P83foeHRroMbH6/2ddFBfab3t7kbzc7v7p4wbkIecHImqt0QNg== +regenerator-transform@^0.15.2: + version "0.15.2" + resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.15.2.tgz#5bbae58b522098ebdf09bca2f83838929001c7a4" + integrity sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg== dependencies: "@babel/runtime" "^7.8.4" @@ -8411,12 +8749,17 @@ regexpu-core@^5.1.0: unicode-match-property-ecmascript "^2.0.0" unicode-match-property-value-ecmascript "^2.0.0" -registry-auth-token@^4.0.0: - version "4.2.2" - resolved "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.2.tgz" - integrity sha512-PC5ZysNb42zpFME6D/XlIgtNGdTl8bBOCw90xQLVMpzuuubJKYDWFAEuUNc+Cn8Z8724tg2SDhDRrkVEsqfDMg== +regexpu-core@^6.1.1: + version "6.1.1" + resolved 
"https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-6.1.1.tgz#b469b245594cb2d088ceebc6369dceb8c00becac" + integrity sha512-k67Nb9jvwJcJmVpw0jPttR1/zVfnKf8Km0IPatrU/zJ5XeG3+Slx0xLXs9HByJSzXzrlz5EDvN6yLNMDc2qdnw== dependencies: - rc "1.2.8" + regenerate "^1.4.2" + regenerate-unicode-properties "^10.2.0" + regjsgen "^0.8.0" + regjsparser "^0.11.0" + unicode-match-property-ecmascript "^2.0.0" + unicode-match-property-value-ecmascript "^2.1.0" registry-auth-token@^5.0.1: version "5.0.2" @@ -8425,13 +8768,6 @@ registry-auth-token@^5.0.1: dependencies: "@pnpm/npm-conf" "^2.1.0" -registry-url@^5.0.0: - version "5.1.0" - resolved "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz" - integrity sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw== - dependencies: - rc "^1.2.8" - registry-url@^6.0.0: version "6.0.1" resolved "https://registry.yarnpkg.com/registry-url/-/registry-url-6.0.1.tgz#056d9343680f2f64400032b1e199faa692286c58" @@ -8444,6 +8780,18 @@ regjsgen@^0.6.0: resolved "https://registry.npmjs.org/regjsgen/-/regjsgen-0.6.0.tgz" integrity sha512-ozE883Uigtqj3bx7OhL1KNbCzGyW2NQZPl6Hs09WTvCuZD5sTI4JY58bkbQWa/Y9hxIsvJ3M8Nbf7j54IqeZbA== +regjsgen@^0.8.0: + version "0.8.0" + resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.8.0.tgz#df23ff26e0c5b300a6470cad160a9d090c3a37ab" + integrity sha512-RvwtGe3d7LvWiDQXeQw8p5asZUmfU1G/l6WbUXeHta7Y2PEIvBTwH6E2EfmYUK8pxcxEdEmaomqyp0vZZ7C+3Q== + +regjsparser@^0.11.0: + version "0.11.2" + resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.11.2.tgz#7404ad42be00226d72bcf1f003f1f441861913d8" + integrity sha512-3OGZZ4HoLJkkAZx/48mTXJNlmqTGOzc0o9OWQPuWpkOlXXPbyN6OafCcoXUnBqE2D3f/T5L+pWc1kdEmnfnRsA== + dependencies: + jsesc "~3.0.2" + regjsparser@^0.8.2: version "0.8.4" resolved "https://registry.npmjs.org/regjsparser/-/regjsparser-0.8.4.tgz" @@ -8459,94 +8807,109 @@ rehype-parse@^7.0.1: hast-util-from-parse5 "^6.0.0" parse5 "^6.0.0" +rehype-raw@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/rehype-raw/-/rehype-raw-7.0.0.tgz#59d7348fd5dbef3807bbaa1d443efd2dd85ecee4" + integrity sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww== + dependencies: + "@types/hast" "^3.0.0" + hast-util-raw "^9.0.0" + vfile "^6.0.0" + +rehype-recma@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/rehype-recma/-/rehype-recma-1.0.0.tgz#d68ef6344d05916bd96e25400c6261775411aa76" + integrity sha512-lqA4rGUf1JmacCNWWZx0Wv1dHqMwxzsDWYMTowuplHF3xH0N/MmrZ/G3BDZnzAkRmxDadujCjaKM2hqYdCBOGw== + dependencies: + "@types/estree" "^1.0.0" + "@types/hast" "^3.0.0" + hast-util-to-estree "^3.0.0" + relateurl@^0.2.7: version "0.2.7" resolved "https://registry.npmjs.org/relateurl/-/relateurl-0.2.7.tgz" integrity sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog== -remark-emoji@^2.2.0: - version "2.2.0" - resolved "https://registry.npmjs.org/remark-emoji/-/remark-emoji-2.2.0.tgz" - integrity sha512-P3cj9s5ggsUvWw5fS2uzCHJMGuXYRb0NnZqYlNecewXt8QBU9n5vW3DUUKOhepS8F9CwdMx9B8a3i7pqFWAI5w== +remark-directive@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/remark-directive/-/remark-directive-3.0.0.tgz#34452d951b37e6207d2e2a4f830dc33442923268" + integrity sha512-l1UyWJ6Eg1VPU7Hm/9tt0zKtReJQNOA4+iDMAxTyZNWnJnFlbS/7zhiel/rogTLQ2vMYwDzSJa4BiVNqGlqIMA== dependencies: - emoticon "^3.2.0" - node-emoji "^1.10.0" - unist-util-visit "^2.0.3" + "@types/mdast" "^4.0.0" + mdast-util-directive 
"^3.0.0" + micromark-extension-directive "^3.0.0" + unified "^11.0.0" -remark-footnotes@2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/remark-footnotes/-/remark-footnotes-2.0.0.tgz" - integrity sha512-3Clt8ZMH75Ayjp9q4CorNeyjwIxHFcTkaektplKGl2A1jNGEUey8cKL0ZC5vJwfcD5GFGsNLImLG/NGzWIzoMQ== - -remark-mdx@1.6.22: - version "1.6.22" - resolved "https://registry.npmjs.org/remark-mdx/-/remark-mdx-1.6.22.tgz" - integrity sha512-phMHBJgeV76uyFkH4rvzCftLfKCr2RZuF+/gmVcaKrpsihyzmhXjA0BEMDaPTXG5y8qZOKPVo83NAOX01LPnOQ== - dependencies: - "@babel/core" "7.12.9" - "@babel/helper-plugin-utils" "7.10.4" - "@babel/plugin-proposal-object-rest-spread" "7.12.1" - "@babel/plugin-syntax-jsx" "7.12.1" - "@mdx-js/util" "1.6.22" - is-alphabetical "1.0.4" - remark-parse "8.0.3" - unified "9.2.0" - -remark-mdx@^2.0.0: - version "2.3.0" - resolved "https://registry.npmjs.org/remark-mdx/-/remark-mdx-2.3.0.tgz" - integrity sha512-g53hMkpM0I98MU266IzDFMrTD980gNF3BJnkyFcmN+dD873mQeD5rdMO3Y2X+x8umQfbSE0PcoEDl7ledSA+2g== - dependencies: - mdast-util-mdx "^2.0.0" - micromark-extension-mdxjs "^1.0.0" - -remark-parse@8.0.3: - version "8.0.3" - resolved "https://registry.npmjs.org/remark-parse/-/remark-parse-8.0.3.tgz" - integrity sha512-E1K9+QLGgggHxCQtLt++uXltxEprmWzNfg+MxpfHsZlrddKzZ/hZyWHDbK3/Ap8HJQqYJRXP+jHczdL6q6i85Q== - dependencies: - ccount "^1.0.0" - collapse-white-space "^1.0.2" - is-alphabetical "^1.0.0" - is-decimal "^1.0.0" - is-whitespace-character "^1.0.0" - is-word-character "^1.0.0" - markdown-escapes "^1.0.0" - parse-entities "^2.0.0" - repeat-string "^1.5.4" - state-toggle "^1.0.0" - trim "0.0.1" - trim-trailing-lines "^1.0.0" - unherit "^1.0.4" - unist-util-remove-position "^2.0.0" - vfile-location "^3.0.0" - xtend "^4.0.1" - -remark-parse@^10.0.0: - version "10.0.1" - resolved "https://registry.npmjs.org/remark-parse/-/remark-parse-10.0.1.tgz" - integrity sha512-1fUyHr2jLsVOkhbvPRBJ5zTKZZyD6yZzYaWCS6BPBdQ8vEMBCH+9zNCDA6tET/zHCi/jLqjCWtlJZUPk+DbnFw== +remark-emoji@^4.0.0: + version "4.0.1" + resolved "https://registry.yarnpkg.com/remark-emoji/-/remark-emoji-4.0.1.tgz#671bfda668047689e26b2078c7356540da299f04" + integrity sha512-fHdvsTR1dHkWKev9eNyhTo4EFwbUvJ8ka9SgeWkMPYFX4WoI7ViVBms3PjlQYgw5TLvNQso3GUB/b/8t3yo+dg== dependencies: - "@types/mdast" "^3.0.0" - mdast-util-from-markdown "^1.0.0" - unified "^10.0.0" + "@types/mdast" "^4.0.2" + emoticon "^4.0.1" + mdast-util-find-and-replace "^3.0.1" + node-emoji "^2.1.0" + unified "^11.0.4" -remark-rehype@^10.0.0: - version "10.1.0" - resolved "https://registry.npmjs.org/remark-rehype/-/remark-rehype-10.1.0.tgz" - integrity sha512-EFmR5zppdBp0WQeDVZ/b66CWJipB2q2VLNFMabzDSGR66Z2fQii83G5gTBbgGEnEEA0QRussvrFHxk1HWGJskw== +remark-frontmatter@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/remark-frontmatter/-/remark-frontmatter-5.0.0.tgz#b68d61552a421ec412c76f4f66c344627dc187a2" + integrity sha512-XTFYvNASMe5iPN0719nPrdItC9aU0ssC4v14mH1BCi1u0n1gAocqcujWUrByftZTbLhRtiKRyjYTSIOcr69UVQ== dependencies: - "@types/hast" "^2.0.0" - "@types/mdast" "^3.0.0" - mdast-util-to-hast "^12.1.0" - unified "^10.0.0" + "@types/mdast" "^4.0.0" + mdast-util-frontmatter "^2.0.0" + micromark-extension-frontmatter "^2.0.0" + unified "^11.0.0" -remark-squeeze-paragraphs@4.0.0: +remark-gfm@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/remark-squeeze-paragraphs/-/remark-squeeze-paragraphs-4.0.0.tgz" - integrity sha512-8qRqmL9F4nuLPIgl92XUuxI3pFxize+F1H0e/W3llTk0UsjJaj01+RrirkMw7P21RKe4X6goQhYRSvNWX+70Rw== + resolved 
"https://registry.yarnpkg.com/remark-gfm/-/remark-gfm-4.0.0.tgz#aea777f0744701aa288b67d28c43565c7e8c35de" + integrity sha512-U92vJgBPkbw4Zfu/IiW2oTZLSL3Zpv+uI7My2eq8JxKgqraFdU8YUGicEJCEgSbeaG+QDFqIcwwfMTOEelPxuA== + dependencies: + "@types/mdast" "^4.0.0" + mdast-util-gfm "^3.0.0" + micromark-extension-gfm "^3.0.0" + remark-parse "^11.0.0" + remark-stringify "^11.0.0" + unified "^11.0.0" + +remark-mdx@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/remark-mdx/-/remark-mdx-3.1.0.tgz#f979be729ecb35318fa48e2135c1169607a78343" + integrity sha512-Ngl/H3YXyBV9RcRNdlYsZujAmhsxwzxpDzpDEhFBVAGthS4GDgnctpDjgFl/ULx5UEDzqtW1cyBSNKqYYrqLBA== + dependencies: + mdast-util-mdx "^3.0.0" + micromark-extension-mdxjs "^3.0.0" + +remark-parse@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/remark-parse/-/remark-parse-11.0.0.tgz#aa60743fcb37ebf6b069204eb4da304e40db45a1" + integrity sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA== + dependencies: + "@types/mdast" "^4.0.0" + mdast-util-from-markdown "^2.0.0" + micromark-util-types "^2.0.0" + unified "^11.0.0" + +remark-rehype@^11.0.0: + version "11.1.1" + resolved "https://registry.yarnpkg.com/remark-rehype/-/remark-rehype-11.1.1.tgz#f864dd2947889a11997c0a2667cd6b38f685bca7" + integrity sha512-g/osARvjkBXb6Wo0XvAeXQohVta8i84ACbenPpoSsxTOQH/Ae0/RGP4WZgnMH5pMLpsj4FG7OHmcIcXxpza8eQ== + dependencies: + "@types/hast" "^3.0.0" + "@types/mdast" "^4.0.0" + mdast-util-to-hast "^13.0.0" + unified "^11.0.0" + vfile "^6.0.0" + +remark-stringify@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/remark-stringify/-/remark-stringify-11.0.0.tgz#4c5b01dd711c269df1aaae11743eb7e2e7636fd3" + integrity sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw== dependencies: - mdast-squeeze-paragraphs "^4.0.0" + "@types/mdast" "^4.0.0" + mdast-util-to-markdown "^2.0.0" + unified "^11.0.0" renderkid@^3.0.0: version "3.0.0" @@ -8559,7 +8922,7 @@ renderkid@^3.0.0: lodash "^4.17.21" strip-ansi "^6.0.1" -repeat-string@^1.0.0, repeat-string@^1.5.4: +repeat-string@^1.0.0: version "1.6.1" resolved "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz" integrity sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w== @@ -8586,7 +8949,7 @@ resolve-alpn@^1.2.0: resolve-from@^4.0.0: version "4.0.0" - resolved "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz" + resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== resolve-pathname@^3.0.0: @@ -8594,7 +8957,7 @@ resolve-pathname@^3.0.0: resolved "https://registry.npmjs.org/resolve-pathname/-/resolve-pathname-3.0.0.tgz" integrity sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng== -resolve@^1.1.6, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.3.2: +resolve@^1.1.6, resolve@^1.14.2, resolve@^1.20.0: version "1.22.1" resolved "https://registry.npmjs.org/resolve/-/resolve-1.22.1.tgz" integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== @@ -8603,12 +8966,14 @@ resolve@^1.1.6, resolve@^1.14.2, resolve@^1.19.0, resolve@^1.20.0, resolve@^1.3. 
path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" -responselike@^1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz" - integrity sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ== +resolve@^1.19.0: + version "1.22.8" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d" + integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw== dependencies: - lowercase-keys "^1.0.0" + is-core-module "^2.13.0" + path-parse "^1.0.7" + supports-preserve-symlinks-flag "^1.0.0" responselike@^3.0.0: version "3.0.0" @@ -8657,14 +9022,14 @@ rtl-detect@^1.0.4: resolved "https://registry.npmjs.org/rtl-detect/-/rtl-detect-1.0.4.tgz" integrity sha512-EBR4I2VDSSYr7PkBmFy04uhycIpDKp+21p/jARYXlCSjQksTBQcJ0HFUPOO79EPPH5JS6VAhiIQbycf0O3JAxQ== -rtlcss@^3.5.0: - version "3.5.0" - resolved "https://registry.npmjs.org/rtlcss/-/rtlcss-3.5.0.tgz" - integrity sha512-wzgMaMFHQTnyi9YOwsx9LjOxYXJPzS8sYnFaKm6R5ysvTkwzHiB0vxnbHwchHQT65PTdBjDG21/kQBWI7q9O7A== +rtlcss@^4.1.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/rtlcss/-/rtlcss-4.3.0.tgz#f8efd4d5b64f640ec4af8fa25b65bacd9e07cc97" + integrity sha512-FI+pHEn7Wc4NqKXMXFM+VAYKEj/mRIcW4h24YVwVtyjI+EqGrLc2Hx/Ny0lrZ21cBWU2goLy36eqMcNj3AQJig== dependencies: - find-up "^5.0.0" + escalade "^3.1.1" picocolors "^1.0.0" - postcss "^8.3.11" + postcss "^8.4.21" strip-json-comments "^3.1.1" run-parallel@^1.1.9: @@ -8674,20 +9039,6 @@ run-parallel@^1.1.9: dependencies: queue-microtask "^1.2.2" -rxjs@^7.5.4: - version "7.5.6" - resolved "https://registry.npmjs.org/rxjs/-/rxjs-7.5.6.tgz" - integrity sha512-dnyv2/YsXhnm461G+R/Pe5bWP41Nm6LBXEYWI6eiFP4fiwx6WRI/CD0zbdVAudd9xwLEF2IDcKXLHit0FYjUzw== - dependencies: - tslib "^2.1.0" - -sade@^1.7.3: - version "1.8.1" - resolved "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz" - integrity sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A== - dependencies: - mri "^1.1.0" - safe-buffer@5.1.2, safe-buffer@>=5.1.0, safe-buffer@^5.1.0, safe-buffer@~5.1.0, safe-buffer@~5.1.1: version "5.1.2" resolved "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz" @@ -8708,13 +9059,12 @@ sax@^1.2.4: resolved "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz" integrity sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw== -scheduler@^0.20.2: - version "0.20.2" - resolved "https://registry.npmjs.org/scheduler/-/scheduler-0.20.2.tgz" - integrity sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ== +scheduler@^0.23.2: + version "0.23.2" + resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.2.tgz#414ba64a3b282892e944cf2108ecc078d115cdc3" + integrity sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ== dependencies: loose-envify "^1.1.0" - object-assign "^4.1.1" schema-utils@2.7.0: version "2.7.0" @@ -8725,15 +9075,6 @@ schema-utils@2.7.0: ajv "^6.12.2" ajv-keywords "^3.4.1" -schema-utils@^2.6.5: - version "2.7.1" - resolved "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz" - integrity sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg== - dependencies: - "@types/json-schema" "^7.0.5" - ajv "^6.12.4" - ajv-keywords "^3.5.2" - schema-utils@^3.0.0, schema-utils@^3.1.1: version "3.1.1" resolved 
"https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz" @@ -8762,6 +9103,16 @@ schema-utils@^4.0.0: ajv-formats "^2.1.1" ajv-keywords "^5.0.0" +schema-utils@^4.0.1: + version "4.2.0" + resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-4.2.0.tgz#70d7c93e153a273a805801882ebd3bff20d89c8b" + integrity sha512-L0jRsrPpjdckP3oPug3/VxNKt2trR8TcabrM6FOAAlvC/9Phcmm+cuAgTlxBqdBR1WJx7Naj9WHw+aOmheSVbw== + dependencies: + "@types/json-schema" "^7.0.9" + ajv "^8.9.0" + ajv-formats "^2.1.1" + ajv-keywords "^5.1.0" + section-matter@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/section-matter/-/section-matter-1.0.0.tgz" @@ -8775,20 +9126,14 @@ select-hose@^2.0.0: resolved "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz" integrity sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg== -selfsigned@^2.0.1: - version "2.0.1" - resolved "https://registry.npmjs.org/selfsigned/-/selfsigned-2.0.1.tgz" - integrity sha512-LmME957M1zOsUhG+67rAjKfiWFox3SBxE/yymatMZsAx+oMrJ0YQ8AToOnyCm7xbeg2ep37IHLxdu0o2MavQOQ== +selfsigned@^2.1.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-2.4.1.tgz#560d90565442a3ed35b674034cec4e95dceb4ae0" + integrity sha512-th5B4L2U+eGLq1TVh7zNRGBapioSORUeymIydxgFpwww9d2qyKvtuPU2jJuHvYAwwqi2Y596QBL3eEqcPEYL8Q== dependencies: + "@types/node-forge" "^1.3.0" node-forge "^1" -semver-diff@^3.1.1: - version "3.1.1" - resolved "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz" - integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg== - dependencies: - semver "^6.3.0" - semver-diff@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-4.0.0.tgz#3afcf5ed6d62259f5c72d0d5d50dffbdc9680df5" @@ -8796,22 +9141,12 @@ semver-diff@^4.0.0: dependencies: semver "^7.3.5" -semver@7.0.0: - version "7.0.0" - resolved "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz" - integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== - -semver@^5.4.1: - version "5.7.1" - resolved "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz" - integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== - -semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0: - version "6.3.0" - resolved "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz" - integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== +semver@^6.3.1: + version "6.3.1" + resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4" + integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA== -semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7: +semver@^7.3.2, semver@^7.3.5, semver@^7.3.7: version "7.3.7" resolved "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz" integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== @@ -8856,18 +9191,17 @@ serialize-javascript@^6.0.1: dependencies: randombytes "^2.1.0" -serve-handler@^6.1.3: - version "6.1.3" - resolved "https://registry.npmjs.org/serve-handler/-/serve-handler-6.1.3.tgz" - integrity sha512-FosMqFBNrLyeiIDvP1zgO6YoTzFYHxLDEIavhlmQ+knB2Z7l1t+kGLHkZIDN7UVWqQAmKI3D20A6F6jo3nDd4w== +serve-handler@^6.1.6: + version "6.1.6" + resolved 
"https://registry.yarnpkg.com/serve-handler/-/serve-handler-6.1.6.tgz#50803c1d3e947cd4a341d617f8209b22bd76cfa1" + integrity sha512-x5RL9Y2p5+Sh3D38Fh9i/iQ5ZK+e4xuXRd/pGbM4D13tgo/MGwbttUk8emytcr1YYzBYs+apnUngBDFYfpjPuQ== dependencies: bytes "3.0.0" content-disposition "0.5.2" - fast-url-parser "1.1.3" mime-types "2.1.18" - minimatch "3.0.4" + minimatch "3.1.2" path-is-inside "1.0.2" - path-to-regexp "2.2.1" + path-to-regexp "3.3.0" range-parser "1.2.0" serve-index@^1.9.1: @@ -8905,11 +9239,6 @@ set-function-length@^1.2.1: gopd "^1.0.1" has-property-descriptors "^1.0.2" -setimmediate@^1.0.5: - version "1.0.5" - resolved "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz" - integrity sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA== - setprototypeof@1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz" @@ -8963,6 +9292,11 @@ shell-quote@^1.7.3: resolved "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.3.tgz" integrity sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw== +shell-quote@^1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.1.tgz#6dbf4db75515ad5bac63b4f1894c3a154c766680" + integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA== + shelljs@^0.8.5: version "0.8.5" resolved "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz" @@ -9008,14 +9342,14 @@ simple-swizzle@^0.2.2: dependencies: is-arrayish "^0.3.1" -sirv@^1.0.7: - version "1.0.19" - resolved "https://registry.npmjs.org/sirv/-/sirv-1.0.19.tgz" - integrity sha512-JuLThK3TnZG1TAKDwNIqNq6QA2afLOCcm+iE8D1Kj3GA40pSPsxQjjJl0J8X3tsR7T+CP1GavpzLwYkgVLWrZQ== +sirv@^2.0.3: + version "2.0.4" + resolved "https://registry.yarnpkg.com/sirv/-/sirv-2.0.4.tgz#5dd9a725c578e34e449f332703eb2a74e46a29b0" + integrity sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ== dependencies: - "@polka/url" "^1.0.0-next.20" - mrmime "^1.0.0" - totalist "^1.0.0" + "@polka/url" "^1.0.0-next.24" + mrmime "^2.0.0" + totalist "^3.0.0" sisteransi@^1.0.5: version "1.0.5" @@ -9032,6 +9366,13 @@ sitemap@^7.1.1: arg "^5.0.0" sax "^1.2.4" +skin-tone@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/skin-tone/-/skin-tone-2.0.0.tgz#4e3933ab45c0d4f4f781745d64b9f4c208e41237" + integrity sha512-kUMbT1oBJCpgrnKoSr0o6wPtvRWT9W9UKvGLwfJYO2WuahZRHOpEyL1ckyMGgMWh0UdpmaoFqKKD29WTomNEGA== + dependencies: + unicode-emoji-modifier-base "^1.0.0" + slash@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz" @@ -9042,6 +9383,14 @@ slash@^4.0.0: resolved "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz" integrity sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew== +snake-case@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/snake-case/-/snake-case-3.0.4.tgz#4f2bbd568e9935abdfd593f34c691dadb49c452c" + integrity sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg== + dependencies: + dot-case "^3.0.4" + tslib "^2.0.3" + sockjs@^0.3.24: version "0.3.24" resolved "https://registry.npmjs.org/sockjs/-/sockjs-0.3.24.tgz" @@ -9051,10 +9400,15 @@ sockjs@^0.3.24: uuid "^8.3.2" websocket-driver "^0.7.4" -sort-css-media-queries@2.0.4: - version "2.0.4" - resolved "https://registry.npmjs.org/sort-css-media-queries/-/sort-css-media-queries-2.0.4.tgz" - 
integrity sha512-PAIsEK/XupCQwitjv7XxoMvYhT7EAfyzI3hsy/MyDgTvc+Ft55ctdkctJLOy6cQejaIC+zjpUL4djFVm2ivOOw== +sort-css-media-queries@2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/sort-css-media-queries/-/sort-css-media-queries-2.2.0.tgz#aa33cf4a08e0225059448b6c40eddbf9f1c8334c" + integrity sha512-0xtkGhWCC9MGt/EzgnvbbbKhqWjl1+/rncmhTh5qCpbYguXh6S/qwePfv/JQ8jePXXmqingylxoC49pCkSPIbA== + +source-map-js@^1.0.1, source-map-js@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46" + integrity sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA== source-map-js@^1.2.0: version "1.2.0" @@ -9069,12 +9423,12 @@ source-map-support@~0.5.20: buffer-from "^1.0.0" source-map "^0.6.0" -source-map@^0.5.0, source-map@^0.5.7: +source-map@^0.5.7: version "0.5.7" - resolved "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz" + resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" integrity sha512-LbrmJOMUSdEVxIKvdcJzQC+nQhe8FUZQTXQy6+I75skNgn3OoQ0DZA8YnFa7gp8tqtL3KPf1kmo0R5DoApeSGQ== -source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.0: +source-map@^0.6.0, source-map@~0.6.0: version "0.6.1" resolved "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== @@ -9122,15 +9476,10 @@ sprintf-js@~1.0.2: resolved "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== -stable@^0.1.8: - version "0.1.8" - resolved "https://registry.npmjs.org/stable/-/stable-0.1.8.tgz" - integrity sha512-ji9qxRnOVfcuLDySj9qzhGSEFVobyt1kIOSkj1qZzYLzq7Tos/oUUWvotUPQLlrsidqsK6tBH89Bc9kL5zHA6w== - -state-toggle@^1.0.0: - version "1.0.3" - resolved "https://registry.npmjs.org/state-toggle/-/state-toggle-1.0.3.tgz" - integrity sha512-d/5Z4/2iiCnHw6Xzghyhb+GcmF89bxwgXG60wjIiZaxnymbyOmI8Hk4VqHXiVVp6u2ysaskFfXg3ekCj4WNftQ== +srcset@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/srcset/-/srcset-4.0.0.tgz#336816b665b14cd013ba545b6fe62357f86e65f4" + integrity sha512-wvLeHgcVHKO8Sc/H/5lkGreJQVeYMm9rlmt8PuR1xE31rIuXhuzznUUqAt8MqLhB3MqJdFzlNAfpcWnxiFUcPw== statuses@2.0.1: version "2.0.1" @@ -9142,10 +9491,10 @@ statuses@2.0.1: resolved "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz" integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== -std-env@^3.0.1: - version "3.1.1" - resolved "https://registry.npmjs.org/std-env/-/std-env-3.1.1.tgz" - integrity sha512-/c645XdExBypL01TpFKiG/3RAa/Qmu+zRi0MwAmrdEkwHNuN0ebo8ccAXBBDa5Z0QOJgBskUIbuCK91x0sCVEw== +std-env@^3.7.0: + version "3.7.0" + resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.7.0.tgz#c9f7386ced6ecf13360b6c6c55b8aaa4ef7481d2" + integrity sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg== stdin-discarder@^0.1.0: version "0.1.0" @@ -9165,7 +9514,7 @@ streamx@^2.15.0, streamx@^2.18.0: optionalDependencies: bare-events "^2.2.0" -"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: +"string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: version "4.2.3" resolved 
"https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -9269,36 +9618,36 @@ strip-json-comments@~2.0.1: resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz" integrity sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ== -style-to-object@0.3.0, style-to-object@^0.3.0: - version "0.3.0" - resolved "https://registry.npmjs.org/style-to-object/-/style-to-object-0.3.0.tgz" - integrity sha512-CzFnRRXhzWIdItT3OmF8SQfWyahHhjq3HwcMNCNLn+N7klOOqPjMeG/4JSu77D7ypZdGvSzvkrbyeTMizz2VrA== - dependencies: - inline-style-parser "0.1.1" - -style-to-object@^0.4.0, style-to-object@^0.4.1: +style-to-object@^0.4.0: version "0.4.1" resolved "https://registry.npmjs.org/style-to-object/-/style-to-object-0.4.1.tgz" integrity sha512-HFpbb5gr2ypci7Qw+IOhnP2zOU7e77b+rzM+wTzXzfi1PrtBCX0E7Pk4wL4iTLnhzZ+JgEGAhX81ebTg/aYjQw== dependencies: inline-style-parser "0.1.1" -stylehacks@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/stylehacks/-/stylehacks-5.1.0.tgz" - integrity sha512-SzLmvHQTrIWfSgljkQCw2++C9+Ne91d/6Sp92I8c5uHTcy/PgeHamwITIbBW9wnFTY/3ZfSXR9HIL6Ikqmcu6Q== +style-to-object@^1.0.0: + version "1.0.8" + resolved "https://registry.yarnpkg.com/style-to-object/-/style-to-object-1.0.8.tgz#67a29bca47eaa587db18118d68f9d95955e81292" + integrity sha512-xT47I/Eo0rwJmaXC4oilDGDWLohVhR6o/xAQcPQN8q6QBuZVL8qMYL85kLmST5cPjAorwvqIA4qXTRQoYHaL6g== dependencies: - browserslist "^4.16.6" - postcss-selector-parser "^6.0.4" + inline-style-parser "0.2.4" + +stylehacks@^6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/stylehacks/-/stylehacks-6.1.1.tgz#543f91c10d17d00a440430362d419f79c25545a6" + integrity sha512-gSTTEQ670cJNoaeIp9KX6lZmm8LJ3jPB5yJmX8Zq/wQxOsAFXV3qjWzHas3YYk1qesuVIyYWWUpZ0vSE/dTSGg== + dependencies: + browserslist "^4.23.0" + postcss-selector-parser "^6.0.16" stylis@4.2.0: version "4.2.0" - resolved "https://registry.npmjs.org/stylis/-/stylis-4.2.0.tgz" + resolved "https://registry.yarnpkg.com/stylis/-/stylis-4.2.0.tgz#79daee0208964c8fe695a42fcffcac633a211a51" integrity sha512-Orov6g6BB1sDfYgzWfTHDOxamtX1bE/zo104Dh9e6fqJ3PooipYyfJ0pUmrZO2wAvO8YbEyeFrkV91XTsGMSrw== supports-color@^5.3.0: version "5.5.0" - resolved "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz" + resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== dependencies: has-flag "^3.0.0" @@ -9319,26 +9668,26 @@ supports-color@^8.0.0: supports-preserve-symlinks-flag@^1.0.0: version "1.0.0" - resolved "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz" + resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== -svg-parser@^2.0.2: +svg-parser@^2.0.4: version "2.0.4" - resolved "https://registry.npmjs.org/svg-parser/-/svg-parser-2.0.4.tgz" + resolved "https://registry.yarnpkg.com/svg-parser/-/svg-parser-2.0.4.tgz#fdc2e29e13951736140b76cb122c8ee6630eb6b5" integrity sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ== -svgo@^2.5.0, svgo@^2.7.0: - 
version "2.8.0" - resolved "https://registry.npmjs.org/svgo/-/svgo-2.8.0.tgz" - integrity sha512-+N/Q9kV1+F+UeWYoSiULYo4xYSDQlTgb+ayMobAXPwMnLvop7oxKMo9OzIrX5x3eS4L4f2UHhc9axXwY8DpChg== +svgo@^3.0.2, svgo@^3.2.0: + version "3.3.2" + resolved "https://registry.yarnpkg.com/svgo/-/svgo-3.3.2.tgz#ad58002652dffbb5986fc9716afe52d869ecbda8" + integrity sha512-OoohrmuUlBs8B8o6MB2Aevn+pRIH9zDALSR+6hhqVfa6fRwG/Qw9VUMSMW9VNg2CFc/MTIfabtdOVl9ODIJjpw== dependencies: "@trysound/sax" "0.2.0" commander "^7.2.0" - css-select "^4.1.3" - css-tree "^1.1.3" - csso "^4.2.0" + css-select "^5.1.0" + css-tree "^2.3.1" + css-what "^6.1.0" + csso "^5.0.5" picocolors "^1.0.0" - stable "^0.1.8" tailwind-theme-switcher@^1.0.2: version "1.0.2" @@ -9388,7 +9737,7 @@ tapable@^1.0.0: resolved "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz" integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== -tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0: +tapable@^2.0.0, tapable@^2.1.1, tapable@^2.2.0, tapable@^2.2.1: version "2.2.1" resolved "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz" integrity sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ== @@ -9434,7 +9783,7 @@ tar-stream@^3.1.5: fast-fifo "^1.2.0" streamx "^2.15.0" -terser-webpack-plugin@^5.3.10: +terser-webpack-plugin@^5.3.10, terser-webpack-plugin@^5.3.9: version "5.3.10" resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-5.3.10.tgz#904f4c9193c6fd2a03f693a2150c62a92f40d199" integrity sha512-BKFPWlPDndPs+NGGCr1U59t0XScL5317Y0UReNrHaw9/FwhPENlq6bfgs+4yPfyP51vqC1bQ4rp1EfXW5ZSH9w== @@ -9445,18 +9794,7 @@ terser-webpack-plugin@^5.3.10: serialize-javascript "^6.0.1" terser "^5.26.0" -terser-webpack-plugin@^5.3.3: - version "5.3.3" - resolved "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.3.tgz" - integrity sha512-Fx60G5HNYknNTNQnzQ1VePRuu89ZVYWfjRAeT5rITuCY/1b08s49e5kSQwHDirKZWuoKOBRFS98EUUoZ9kLEwQ== - dependencies: - "@jridgewell/trace-mapping" "^0.3.7" - jest-worker "^27.4.5" - schema-utils "^3.1.1" - serialize-javascript "^6.0.0" - terser "^5.7.2" - -terser@^5.10.0, terser@^5.7.2: +terser@^5.10.0: version "5.14.1" resolved "https://registry.npmjs.org/terser/-/terser-5.14.1.tgz" integrity sha512-+ahUAE+iheqBTDxXhTisdA8hgvbEG1hHOQ9xmNjeUJSoi6DU/gMrKNcfZjHkyY6Alnuyc+ikYJaxxfHkT3+WuQ== @@ -9466,6 +9804,16 @@ terser@^5.10.0, terser@^5.7.2: commander "^2.20.0" source-map-support "~0.5.20" +terser@^5.15.1: + version "5.36.0" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.36.0.tgz#8b0dbed459ac40ff7b4c9fd5a3a2029de105180e" + integrity sha512-IYV9eNMuFAV4THUspIRXkLakHnV6XO7FEdtKjf/mDyrnqUg9LnlOn6/RwRvM9SZjR4GUq8Nk8zj67FzVARr74w== + dependencies: + "@jridgewell/source-map" "^0.3.3" + acorn "^8.8.2" + commander "^2.20.0" + source-map-support "~0.5.20" + terser@^5.26.0: version "5.31.6" resolved "https://registry.yarnpkg.com/terser/-/terser-5.31.6.tgz#c63858a0f0703988d0266a82fcbf2d7ba76422b1" @@ -9498,7 +9846,7 @@ tiny-invariant@^1.0.2: resolved "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.2.0.tgz" integrity sha512-1Uhn/aqw5C6RI4KejVeTg6mIS7IqxnLJ8Mv2tV5rTc0qWobay7pDUz6Wi392Cnc8ak1H0F2cjoRzb2/AW4+Fvg== -tiny-warning@^1.0.0, tiny-warning@^1.0.3: +tiny-warning@^1.0.0: version "1.0.3" resolved "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz" integrity sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA== @@ -9512,14 +9860,9 @@ 
tmp@^0.2.1: to-fast-properties@^2.0.0: version "2.0.0" - resolved "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz" + resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" integrity sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog== -to-readable-stream@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz" - integrity sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q== - to-regex-range@^5.0.1: version "5.0.1" resolved "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz" @@ -9540,31 +9883,16 @@ toidentifier@1.0.1: resolved "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz" integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA== -totalist@^1.0.0: - version "1.1.0" - resolved "https://registry.npmjs.org/totalist/-/totalist-1.1.0.tgz" - integrity sha512-gduQwd1rOdDMGxFG1gEvhV88Oirdo2p+KjoYFU7k2g+i7n6AFFbDQ5kMPUsW0pNbfQsB/cwXvT1i4Bue0s9g5g== - -tr46@~0.0.3: - version "0.0.3" - resolved "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz" - integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== +totalist@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/totalist/-/totalist-3.0.1.tgz#ba3a3d600c915b1a97872348f79c127475f6acf8" + integrity sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ== trim-lines@^3.0.0: version "3.0.1" resolved "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz" integrity sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg== -trim-trailing-lines@^1.0.0: - version "1.1.4" - resolved "https://registry.npmjs.org/trim-trailing-lines/-/trim-trailing-lines-1.1.4.tgz" - integrity sha512-rjUWSqnfTNrjbB9NQWfPMH/xRK1deHeGsHoVfpxJ++XeYXE0d6B1En37AHfw3jtfTU7dzMzZL2jjpe8Qb5gLIQ== - -trim@0.0.1: - version "0.0.1" - resolved "https://registry.npmjs.org/trim/-/trim-0.0.1.tgz" - integrity sha512-YzQV+TZg4AxpKxaTHK3c3D+kRDCGVEE7LemdlQZoQXn0iennk10RsIoY6ikzAqJTc9Xjl9C1/waHom/J86ziAQ== - trough@^1.0.0: version "1.0.5" resolved "https://registry.npmjs.org/trough/-/trough-1.0.5.tgz" @@ -9575,11 +9903,16 @@ trough@^2.0.0: resolved "https://registry.npmjs.org/trough/-/trough-2.1.0.tgz" integrity sha512-AqTiAOLcj85xS7vQ8QkAV41hPDIJ71XJB4RCUrzo/1GM2CQwhkJGaf9Hgr7BOugMRpgGUrqRg/DrBDl4H40+8g== -tslib@^2.0.3, tslib@^2.1.0, tslib@^2.4.0: +tslib@^2.0.3: version "2.4.0" resolved "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz" integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== +tslib@^2.6.0: + version "2.8.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.1.tgz#612efe4ed235d567e8aba5f2a5fab70280ade83f" + integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== + tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" @@ -9587,10 +9920,10 @@ tunnel-agent@^0.6.0: dependencies: safe-buffer "^5.0.1" -type-fest@^0.20.2: - version "0.20.2" - resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz" - integrity 
sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== +type-fest@^0.21.3: + version "0.21.3" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.21.3.tgz#d260a24b0198436e133fa26a524a6d65fa3b2e37" + integrity sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w== type-fest@^1.0.1: version "1.4.0" @@ -9622,29 +9955,21 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" -typescript@^4.7.4: - version "4.7.4" - resolved "https://registry.npmjs.org/typescript/-/typescript-4.7.4.tgz" - integrity sha512-C0WQT0gezHuw6AdY1M2jxUO83Rjf0HP7Sk1DtXj6j1EwkQNZrHAg2XPWlq62oqEhYvONq5pkC2Y9oPljWToLmQ== - -ua-parser-js@^0.7.30: - version "0.7.31" - resolved "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.31.tgz" - integrity sha512-qLK/Xe9E2uzmYI3qLeOmI0tEOt+TBBQyUIAh4aAgU05FVYzeZrKUdkAZfBNVGRaHVgV0TDkdEngJSw/SyQchkQ== - -unherit@^1.0.4: - version "1.1.3" - resolved "https://registry.npmjs.org/unherit/-/unherit-1.1.3.tgz" - integrity sha512-Ft16BJcnapDKp0+J/rqFC3Rrk6Y/Ng4nzsC028k2jdDII/rdZ7Wd3pPT/6+vIIxRagwRc9K0IUX0Ra4fKvw+WQ== - dependencies: - inherits "^2.0.0" - xtend "^4.0.0" +typescript@^5.6.3: + version "5.6.3" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.6.3.tgz#5f3449e31c9d94febb17de03cc081dd56d81db5b" + integrity sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw== unicode-canonical-property-names-ecmascript@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.0.tgz" integrity sha512-yY5PpDlfVIU5+y/BSCxAJRBIS1Zc2dDG3Ujq+sR0U+JjUevW2JhocOF+soROYDSaAezOzOKuyyixhD6mBknSmQ== +unicode-emoji-modifier-base@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unicode-emoji-modifier-base/-/unicode-emoji-modifier-base-1.0.0.tgz#dbbd5b54ba30f287e2a8d5a249da6c0cef369459" + integrity sha512-yLSH4py7oFH3oG/9K+XWrz1pSi3dfUrWEnInbxMfArOfc1+33BlGPQtLsOYwvdMy11AwUBetYuaRxSPqgkq+8g== + unicode-match-property-ecmascript@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz" @@ -9658,37 +9983,30 @@ unicode-match-property-value-ecmascript@^2.0.0: resolved "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.0.0.tgz" integrity sha512-7Yhkc0Ye+t4PNYzOGKedDhXbYIBe1XEQYQxOPyhcXNMJ0WCABqqj6ckydd6pWRZTHV4GuCPKdBAUiMc60tsKVw== +unicode-match-property-value-ecmascript@^2.1.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.2.0.tgz#a0401aee72714598f739b68b104e4fe3a0cb3c71" + integrity sha512-4IehN3V/+kkr5YeSSDDQG8QLqO26XpL2XP3GQtqwlT/QYSECAwFztxVHjlbh0+gjJ3XmNLS0zDsbgs9jWKExLg== + unicode-property-aliases-ecmascript@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.0.0.tgz" integrity sha512-5Zfuy9q/DFr4tfO7ZPeVXb1aPoeQSdeFMLpYuFebehDAhbuevLs5yxSZmIFN1tP5F9Wl4IpJrYojg85/zgyZHQ== -unified@9.2.0: - version "9.2.0" - resolved "https://registry.npmjs.org/unified/-/unified-9.2.0.tgz" - integrity sha512-vx2Z0vY+a3YoTj8+pttM3tiJHCwY5UFbYdiWrwBEbHmK8pvsPj2rtAX2BFfgXen8T39CJWblWRDT4L5WGXtDdg== - dependencies: - bail "^1.0.0" - extend "^3.0.0" - is-buffer "^2.0.0" - is-plain-obj "^2.0.0" - trough "^1.0.0" - vfile "^4.0.0" 
- -unified@^10.0.0: - version "10.1.2" - resolved "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz" - integrity sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q== +unified@^11.0.0, unified@^11.0.3, unified@^11.0.4: + version "11.0.5" + resolved "https://registry.yarnpkg.com/unified/-/unified-11.0.5.tgz#f66677610a5c0a9ee90cab2b8d4d66037026d9e1" + integrity sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA== dependencies: - "@types/unist" "^2.0.0" + "@types/unist" "^3.0.0" bail "^2.0.0" + devlop "^1.0.0" extend "^3.0.0" - is-buffer "^2.0.0" is-plain-obj "^4.0.0" trough "^2.0.0" - vfile "^5.0.0" + vfile "^6.0.0" -unified@^9.0.0, unified@^9.2.2: +unified@^9.0.0: version "9.2.2" resolved "https://registry.npmjs.org/unified/-/unified-9.2.2.tgz" integrity sha512-Sg7j110mtefBD+qunSLO1lqOEKdrwBFBrR6Qd8f4uwkhWNlbkaqwHse6e7QvD3AP/MNoJdEDLaf8OxYyoWgorQ== @@ -9700,13 +10018,6 @@ unified@^9.0.0, unified@^9.2.2: trough "^1.0.0" vfile "^4.0.0" -unique-string@^2.0.0: - version "2.0.0" - resolved "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz" - integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg== - dependencies: - crypto-random-string "^2.0.0" - unique-string@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-3.0.0.tgz#84a1c377aff5fd7a8bc6b55d8244b2bd90d75b9a" @@ -9714,11 +10025,6 @@ unique-string@^3.0.0: dependencies: crypto-random-string "^4.0.0" -unist-builder@2.0.3, unist-builder@^2.0.0: - version "2.0.3" - resolved "https://registry.npmjs.org/unist-builder/-/unist-builder-2.0.3.tgz" - integrity sha512-f98yt5pnlMWlzP539tPc4grGMsFaQQlP/vM396b00jngsiINumNmsY8rkXjfoi1c6QaM8nQ3vaGDuoKWbe/1Uw== - unist-util-find-after@^3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/unist-util-find-after/-/unist-util-find-after-3.0.0.tgz" @@ -9726,68 +10032,31 @@ unist-util-find-after@^3.0.0: dependencies: unist-util-is "^4.0.0" -unist-util-generated@^1.0.0: - version "1.1.6" - resolved "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-1.1.6.tgz" - integrity sha512-cln2Mm1/CZzN5ttGK7vkoGw+RZ8VcUH6BtGbq98DDtRGquAAOXig1mrBQYelOwMXYS8rK+vZDyyojSjp7JX+Lg== - -unist-util-generated@^2.0.0: - version "2.0.1" - resolved "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-2.0.1.tgz" - integrity sha512-qF72kLmPxAw0oN2fwpWIqbXAVyEqUzDHMsbtPvOudIlUzXYFIeQIuxXQCRCFh22B7cixvU0MG7m3MW8FTq/S+A== - unist-util-is@^4.0.0, unist-util-is@^4.0.2: version "4.1.0" resolved "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz" integrity sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg== -unist-util-is@^5.0.0: - version "5.2.1" - resolved "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz" - integrity sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw== - dependencies: - "@types/unist" "^2.0.0" - -unist-util-position-from-estree@^1.0.0, unist-util-position-from-estree@^1.1.0: - version "1.1.2" - resolved "https://registry.npmjs.org/unist-util-position-from-estree/-/unist-util-position-from-estree-1.1.2.tgz" - integrity sha512-poZa0eXpS+/XpoQwGwl79UUdea4ol2ZuCYguVaJS4qzIOMDzbqz8a3erUCOmubSZkaOuGamb3tX790iwOIROww== - dependencies: - "@types/unist" "^2.0.0" - -unist-util-position@^3.0.0: - version "3.1.0" - resolved 
"https://registry.npmjs.org/unist-util-position/-/unist-util-position-3.1.0.tgz" - integrity sha512-w+PkwCbYSFw8vpgWD0v7zRCl1FpY3fjDSQ3/N/wNd9Ffa4gPi8+4keqt99N3XW6F99t/mUzp2xAhNmfKWp95QA== - -unist-util-position@^4.0.0: - version "4.0.4" - resolved "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.4.tgz" - integrity sha512-kUBE91efOWfIVBo8xzh/uZQ7p9ffYRtUbMRZBNFYwf0RK8koUMx6dGUfwylLOKmaT2cs4wSW96QoYUSXAyEtpg== - dependencies: - "@types/unist" "^2.0.0" - -unist-util-remove-position@^2.0.0: - version "2.0.1" - resolved "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-2.0.1.tgz" - integrity sha512-fDZsLYIe2uT+oGFnuZmy73K6ZxOPG/Qcm+w7jbEjaFcJgbQ6cqjs/eSPzXhsmGpAsWPkqZM9pYjww5QTn3LHMA== +unist-util-is@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/unist-util-is/-/unist-util-is-6.0.0.tgz#b775956486aff107a9ded971d996c173374be424" + integrity sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw== dependencies: - unist-util-visit "^2.0.0" + "@types/unist" "^3.0.0" -unist-util-remove-position@^4.0.0: - version "4.0.2" - resolved "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-4.0.2.tgz" - integrity sha512-TkBb0HABNmxzAcfLf4qsIbFbaPDvMO6wa3b3j4VcEzFVaw1LBKwnW4/sRJ/atSLSzoIg41JWEdnE7N6DIhGDGQ== +unist-util-position-from-estree@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/unist-util-position-from-estree/-/unist-util-position-from-estree-2.0.0.tgz#d94da4df596529d1faa3de506202f0c9a23f2200" + integrity sha512-KaFVRjoqLyF6YXCbVLNad/eS4+OfPQQn2yOd7zF/h5T/CSL2v8NpN6a5TPvtbXthAGw5nG+PuTtq+DdIZr+cRQ== dependencies: - "@types/unist" "^2.0.0" - unist-util-visit "^4.0.0" + "@types/unist" "^3.0.0" -unist-util-remove@^2.0.0: - version "2.1.0" - resolved "https://registry.npmjs.org/unist-util-remove/-/unist-util-remove-2.1.0.tgz" - integrity sha512-J8NYPyBm4baYLdCbjmf1bhPu45Cr1MWTm77qd9istEkzWpnN6O9tMsEbB2JhNnBCqGENRqEWomQ+He6au0B27Q== +unist-util-position@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/unist-util-position/-/unist-util-position-5.0.0.tgz#678f20ab5ca1207a97d7ea8a388373c9cf896be4" + integrity sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA== dependencies: - unist-util-is "^4.0.0" + "@types/unist" "^3.0.0" unist-util-stringify-position@^2.0.0: version "2.0.3" @@ -9796,12 +10065,12 @@ unist-util-stringify-position@^2.0.0: dependencies: "@types/unist" "^2.0.2" -unist-util-stringify-position@^3.0.0: - version "3.0.3" - resolved "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz" - integrity sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg== +unist-util-stringify-position@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz#449c6e21a880e0855bf5aabadeb3a740314abac2" + integrity sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ== dependencies: - "@types/unist" "^2.0.0" + "@types/unist" "^3.0.0" unist-util-visit-parents@^3.0.0: version "3.1.1" @@ -9811,15 +10080,15 @@ unist-util-visit-parents@^3.0.0: "@types/unist" "^2.0.0" unist-util-is "^4.0.0" -unist-util-visit-parents@^5.1.1: - version "5.1.3" - resolved "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz" - integrity 
sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg== +unist-util-visit-parents@^6.0.0: + version "6.0.1" + resolved "https://registry.yarnpkg.com/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz#4d5f85755c3b8f0dc69e21eca5d6d82d22162815" + integrity sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw== dependencies: - "@types/unist" "^2.0.0" - unist-util-is "^5.0.0" + "@types/unist" "^3.0.0" + unist-util-is "^6.0.0" -unist-util-visit@2.0.3, unist-util-visit@^2.0.0, unist-util-visit@^2.0.3: +unist-util-visit@^2.0.0: version "2.0.3" resolved "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz" integrity sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q== @@ -9828,14 +10097,14 @@ unist-util-visit@2.0.3, unist-util-visit@^2.0.0, unist-util-visit@^2.0.3: unist-util-is "^4.0.0" unist-util-visit-parents "^3.0.0" -unist-util-visit@^4.0.0: - version "4.1.2" - resolved "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz" - integrity sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg== +unist-util-visit@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/unist-util-visit/-/unist-util-visit-5.0.0.tgz#a7de1f31f72ffd3519ea71814cccf5fd6a9217d6" + integrity sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg== dependencies: - "@types/unist" "^2.0.0" - unist-util-is "^5.0.0" - unist-util-visit-parents "^5.1.1" + "@types/unist" "^3.0.0" + unist-util-is "^6.0.0" + unist-util-visit-parents "^6.0.0" universalify@^2.0.0: version "2.0.0" @@ -9855,33 +10124,13 @@ update-browserslist-db@^1.0.9: escalade "^3.1.1" picocolors "^1.0.0" -update-browserslist-db@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz#7ca61c0d8650766090728046e416a8cde682859e" - integrity sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ== - dependencies: - escalade "^3.1.2" - picocolors "^1.0.1" - -update-notifier@^5.1.0: - version "5.1.0" - resolved "https://registry.npmjs.org/update-notifier/-/update-notifier-5.1.0.tgz" - integrity sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw== +update-browserslist-db@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz#80846fba1d79e82547fb661f8d141e0945755fe5" + integrity sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A== dependencies: - boxen "^5.0.0" - chalk "^4.1.0" - configstore "^5.0.1" - has-yarn "^2.1.0" - import-lazy "^2.1.0" - is-ci "^2.0.0" - is-installed-globally "^0.4.0" - is-npm "^5.0.0" - is-yarn-global "^0.3.0" - latest-version "^5.1.0" - pupa "^2.1.1" - semver "^7.3.4" - semver-diff "^3.1.1" - xdg-basedir "^4.0.0" + escalade "^3.2.0" + picocolors "^1.1.0" update-notifier@^6.0.2: version "6.0.2" @@ -9919,35 +10168,6 @@ url-loader@^4.1.1: mime-types "^2.1.27" schema-utils "^3.0.0" -url-parse-lax@^3.0.0: - version "3.0.0" - resolved "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz" - integrity sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ== - dependencies: - prepend-http "^2.0.0" - -use-composed-ref@^1.3.0: - version "1.3.0" - resolved 
"https://registry.npmjs.org/use-composed-ref/-/use-composed-ref-1.3.0.tgz" - integrity sha512-GLMG0Jc/jiKov/3Ulid1wbv3r54K9HlMW29IWcDFPEqFkSO2nS0MuefWgMJpeHQ9YJeXDL3ZUF+P3jdXlZX/cQ== - -use-isomorphic-layout-effect@^1.1.1: - version "1.1.2" - resolved "https://registry.npmjs.org/use-isomorphic-layout-effect/-/use-isomorphic-layout-effect-1.1.2.tgz" - integrity sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA== - -use-latest@^1.2.1: - version "1.2.1" - resolved "https://registry.npmjs.org/use-latest/-/use-latest-1.2.1.tgz" - integrity sha512-xA+AVm/Wlg3e2P/JiItTziwS7FK92LWrDB0p+hgXloIMuVCeJJ8v6f0eeHyPZaJrM+usM1FkFfbNCrJGs8A/zw== - dependencies: - use-isomorphic-layout-effect "^1.1.1" - -use-sync-external-store@^1.2.0: - version "1.2.0" - resolved "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz" - integrity sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA== - util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz" @@ -9973,16 +10193,6 @@ uuid@^8.3.2: resolved "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz" integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg== -uvu@^0.5.0: - version "0.5.6" - resolved "https://registry.npmjs.org/uvu/-/uvu-0.5.6.tgz" - integrity sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA== - dependencies: - dequal "^2.0.0" - diff "^5.0.0" - kleur "^4.0.3" - sade "^1.7.3" - value-equal@^1.0.1: version "1.0.1" resolved "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz" @@ -9993,18 +10203,18 @@ vary@~1.1.2: resolved "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz" integrity sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg== -vfile-location@^3.0.0, vfile-location@^3.2.0: +vfile-location@^3.2.0: version "3.2.0" resolved "https://registry.npmjs.org/vfile-location/-/vfile-location-3.2.0.tgz" integrity sha512-aLEIZKv/oxuCDZ8lkJGhuhztf/BW4M+iHdCwglA/eWc+vtuRFJj8EtgceYFX4LRjOhCAAiNHsKGssC6onJ+jbA== -vfile-location@^4.0.0: - version "4.1.0" - resolved "https://registry.npmjs.org/vfile-location/-/vfile-location-4.1.0.tgz" - integrity sha512-YF23YMyASIIJXpktBa4vIGLJ5Gs88UB/XePgqPmTa7cDA+JeO3yclbpheQYCHjVHBn/yePzrXuygIL+xbvRYHw== +vfile-location@^5.0.0: + version "5.0.3" + resolved "https://registry.yarnpkg.com/vfile-location/-/vfile-location-5.0.3.tgz#cb9eacd20f2b6426d19451e0eafa3d0a846225c3" + integrity sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg== dependencies: - "@types/unist" "^2.0.0" - vfile "^5.0.0" + "@types/unist" "^3.0.0" + vfile "^6.0.0" vfile-message@^2.0.0: version "2.0.4" @@ -10014,13 +10224,13 @@ vfile-message@^2.0.0: "@types/unist" "^2.0.0" unist-util-stringify-position "^2.0.0" -vfile-message@^3.0.0: - version "3.1.4" - resolved "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz" - integrity sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw== +vfile-message@^4.0.0: + version "4.0.2" + resolved "https://registry.yarnpkg.com/vfile-message/-/vfile-message-4.0.2.tgz#c883c9f677c72c166362fd635f21fc165a7d1181" + integrity sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw== dependencies: - "@types/unist" "^2.0.0" - 
unist-util-stringify-position "^3.0.0" + "@types/unist" "^3.0.0" + unist-util-stringify-position "^4.0.0" vfile@^4.0.0: version "4.2.1" @@ -10032,26 +10242,13 @@ vfile@^4.0.0: unist-util-stringify-position "^2.0.0" vfile-message "^2.0.0" -vfile@^5.0.0: - version "5.3.7" - resolved "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz" - integrity sha512-r7qlzkgErKjobAmyNIkkSpizsFPYiUPuJb5pNW1RB4JcYVZhs4lIbVqk8XPk033CV/1z8ss5pkax8SuhGpcG8g== - dependencies: - "@types/unist" "^2.0.0" - is-buffer "^2.0.0" - unist-util-stringify-position "^3.0.0" - vfile-message "^3.0.0" - -wait-on@^6.0.1: - version "6.0.1" - resolved "https://registry.npmjs.org/wait-on/-/wait-on-6.0.1.tgz" - integrity sha512-zht+KASY3usTY5u2LgaNqn/Cd8MukxLGjdcZxT2ns5QzDmTFc4XoWBgC+C/na+sMRZTuVygQoMYwdcVjHnYIVw== +vfile@^6.0.0, vfile@^6.0.1: + version "6.0.3" + resolved "https://registry.yarnpkg.com/vfile/-/vfile-6.0.3.tgz#3652ab1c496531852bf55a6bac57af981ebc38ab" + integrity sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q== dependencies: - axios "^0.25.0" - joi "^17.6.0" - lodash "^4.17.21" - minimist "^1.2.5" - rxjs "^7.5.4" + "@types/unist" "^3.0.0" + vfile-message "^4.0.0" watchpack@^2.4.1: version "2.4.2" @@ -10073,27 +10270,30 @@ web-namespaces@^1.0.0: resolved "https://registry.npmjs.org/web-namespaces/-/web-namespaces-1.1.4.tgz" integrity sha512-wYxSGajtmoP4WxfejAPIr4l0fVh+jeMXZb08wNc0tMg6xsfZXj3cECqIK0G7ZAqUq0PP8WlMDtaOGVBTAWztNw== -webidl-conversions@^3.0.0: - version "3.0.1" - resolved "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz" - integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== +web-namespaces@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/web-namespaces/-/web-namespaces-2.0.1.tgz#1010ff7c650eccb2592cebeeaf9a1b253fd40692" + integrity sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ== -webpack-bundle-analyzer@^4.5.0: - version "4.5.0" - resolved "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.5.0.tgz" - integrity sha512-GUMZlM3SKwS8Z+CKeIFx7CVoHn3dXFcUAjT/dcZQQmfSZGvitPfMob2ipjai7ovFFqPvTqkEZ/leL4O0YOdAYQ== +webpack-bundle-analyzer@^4.10.2: + version "4.10.2" + resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-4.10.2.tgz#633af2862c213730be3dbdf40456db171b60d5bd" + integrity sha512-vJptkMm9pk5si4Bv922ZbKLV8UTT4zib4FPgXMhgzUny0bfDDkLXAVQs3ly3fS4/TN9ROFtb0NFrm04UXFE/Vw== dependencies: + "@discoveryjs/json-ext" "0.5.7" acorn "^8.0.4" acorn-walk "^8.0.0" - chalk "^4.1.0" commander "^7.2.0" + debounce "^1.2.1" + escape-string-regexp "^4.0.0" gzip-size "^6.0.0" - lodash "^4.17.20" + html-escaper "^2.0.2" opener "^1.5.2" - sirv "^1.0.7" + picocolors "^1.0.0" + sirv "^2.0.3" ws "^7.3.1" -webpack-dev-middleware@^5.3.1: +webpack-dev-middleware@^5.3.4: version "5.3.4" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz#eb7b39281cbce10e104eb2b8bf2b63fce49a3517" integrity sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q== @@ -10104,10 +10304,10 @@ webpack-dev-middleware@^5.3.1: range-parser "^1.2.1" schema-utils "^4.0.0" -webpack-dev-server@^4.9.3: - version "4.9.3" - resolved "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.9.3.tgz" - integrity sha512-3qp/eoboZG5/6QgiZ3llN8TUzkSpYg1Ko9khWX1h40MIEUNS2mDoIa8aXsPfskER+GbTvs/IJZ1QTBBhhuetSw== 
+webpack-dev-server@^4.15.2: + version "4.15.2" + resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-4.15.2.tgz#9e0c70a42a012560860adb186986da1248333173" + integrity sha512-0XavAZbNJ5sDrCbkpWL8mia0o5WPOd2YGtxrEiZkBK9FjLppIUK2TgxK6qGD2P3hUXTJNNPVibrerKcx5WkR1g== dependencies: "@types/bonjour" "^3.5.9" "@types/connect-history-api-fallback" "^1.3.5" @@ -10115,7 +10315,7 @@ webpack-dev-server@^4.9.3: "@types/serve-index" "^1.9.1" "@types/serve-static" "^1.13.10" "@types/sockjs" "^0.3.33" - "@types/ws" "^8.5.1" + "@types/ws" "^8.5.5" ansi-html-community "^0.0.8" bonjour-service "^1.0.11" chokidar "^3.5.3" @@ -10128,42 +10328,53 @@ webpack-dev-server@^4.9.3: html-entities "^2.3.2" http-proxy-middleware "^2.0.3" ipaddr.js "^2.0.1" + launch-editor "^2.6.0" open "^8.0.9" p-retry "^4.5.0" rimraf "^3.0.2" schema-utils "^4.0.0" - selfsigned "^2.0.1" + selfsigned "^2.1.1" serve-index "^1.9.1" sockjs "^0.3.24" spdy "^4.0.2" - webpack-dev-middleware "^5.3.1" - ws "^8.4.2" + webpack-dev-middleware "^5.3.4" + ws "^8.13.0" -webpack-merge@^5.8.0: - version "5.8.0" - resolved "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz" - integrity sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q== +webpack-merge@^5.9.0: + version "5.10.0" + resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-5.10.0.tgz#a3ad5d773241e9c682803abf628d4cd62b8a4177" + integrity sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA== dependencies: clone-deep "^4.0.1" + flat "^5.0.2" wildcard "^2.0.0" -webpack-sources@^3.2.2, webpack-sources@^3.2.3: +webpack-merge@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/webpack-merge/-/webpack-merge-6.0.1.tgz#50c776868e080574725abc5869bd6e4ef0a16c6a" + integrity sha512-hXXvrjtx2PLYx4qruKl+kyRSLc52V+cCvMxRjmKwoA+CBbbF5GfIBtR6kCvl0fYGqTUPKB+1ktVmTHqMOzgCBg== + dependencies: + clone-deep "^4.0.1" + flat "^5.0.2" + wildcard "^2.0.1" + +webpack-sources@^3.2.3: version "3.2.3" resolved "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz" integrity sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w== -webpack@^5.73.0: - version "5.94.0" - resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.94.0.tgz#77a6089c716e7ab90c1c67574a28da518a20970f" - integrity sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg== +webpack@^5.88.1, webpack@^5.95.0: + version "5.96.1" + resolved "https://registry.yarnpkg.com/webpack/-/webpack-5.96.1.tgz#3676d1626d8312b6b10d0c18cc049fba7ac01f0c" + integrity sha512-l2LlBSvVZGhL4ZrPwyr8+37AunkcYj5qh8o6u2/2rzoPc8gxFJkLj1WxNgooi9pnoc06jh0BjuXnamM4qlujZA== dependencies: - "@types/estree" "^1.0.5" + "@types/eslint-scope" "^3.7.7" + "@types/estree" "^1.0.6" "@webassemblyjs/ast" "^1.12.1" "@webassemblyjs/wasm-edit" "^1.12.1" "@webassemblyjs/wasm-parser" "^1.12.1" - acorn "^8.7.1" - acorn-import-attributes "^1.9.5" - browserslist "^4.21.10" + acorn "^8.14.0" + browserslist "^4.24.0" chrome-trace-event "^1.0.2" enhanced-resolve "^5.17.1" es-module-lexer "^1.2.1" @@ -10181,15 +10392,19 @@ webpack@^5.73.0: watchpack "^2.4.1" webpack-sources "^3.2.3" -webpackbar@^5.0.2: - version "5.0.2" - resolved "https://registry.npmjs.org/webpackbar/-/webpackbar-5.0.2.tgz" - integrity sha512-BmFJo7veBDgQzfWXl/wwYXr/VFus0614qZ8i9znqcl9fnEdiVkdbi0TedLQ6xAK92HZHDJ0QmyQ0fmuZPAgCYQ== +webpackbar@^6.0.1: + version "6.0.1" + resolved 
"https://registry.yarnpkg.com/webpackbar/-/webpackbar-6.0.1.tgz#5ef57d3bf7ced8b19025477bc7496ea9d502076b" + integrity sha512-TnErZpmuKdwWBdMoexjio3KKX6ZtoKHRVvLIU0A47R0VVBDtx3ZyOJDktgYixhoJokZTYTt1Z37OkO9pnGJa9Q== dependencies: - chalk "^4.1.0" - consola "^2.15.3" + ansi-escapes "^4.3.2" + chalk "^4.1.2" + consola "^3.2.3" + figures "^3.2.0" + markdown-table "^2.0.0" pretty-time "^1.1.0" - std-env "^3.0.1" + std-env "^3.7.0" + wrap-ansi "^7.0.0" websocket-driver@>=0.5.1, websocket-driver@^0.7.4: version "0.7.4" @@ -10205,14 +10420,6 @@ websocket-extensions@>=0.1.1: resolved "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz" integrity sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg== -whatwg-url@^5.0.0: - version "5.0.0" - resolved "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz" - integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw== - dependencies: - tr46 "~0.0.3" - webidl-conversions "^3.0.0" - which@^1.3.1: version "1.3.1" resolved "https://registry.npmjs.org/which/-/which-1.3.1.tgz" @@ -10234,13 +10441,6 @@ wide-align@^1.1.2: dependencies: string-width "^1.0.2 || 2 || 3 || 4" -widest-line@^3.1.0: - version "3.1.0" - resolved "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz" - integrity sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg== - dependencies: - string-width "^4.0.0" - widest-line@^4.0.1: version "4.0.1" resolved "https://registry.npmjs.org/widest-line/-/widest-line-4.0.1.tgz" @@ -10253,9 +10453,14 @@ wildcard@^2.0.0: resolved "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz" integrity sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw== +wildcard@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/wildcard/-/wildcard-2.0.1.tgz#5ab10d02487198954836b6349f74fff961e10f67" + integrity sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ== + wrap-ansi@^7.0.0: version "7.0.0" - resolved "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== dependencies: ansi-styles "^4.0.0" @@ -10285,7 +10490,7 @@ wrappy@1: resolved "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz" integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ== -write-file-atomic@^3.0.0, write-file-atomic@^3.0.3: +write-file-atomic@^3.0.3: version "3.0.3" resolved "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz" integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== @@ -10300,15 +10505,10 @@ ws@^7.3.1: resolved "https://registry.npmjs.org/ws/-/ws-7.5.8.tgz" integrity sha512-ri1Id1WinAX5Jqn9HejiGb8crfRio0Qgu8+MtL36rlTA6RLsMdWt1Az/19A2Qij6uSHUMphEFaTKa4WG+UNHNw== -ws@^8.4.2: - version "8.8.0" - resolved "https://registry.npmjs.org/ws/-/ws-8.8.0.tgz" - integrity sha512-JDAgSYQ1ksuwqfChJusw1LSJ8BizJ2e/vVu5Lxjq3YvNJNlROv1ui4i+c/kUUrPheBvQl4c5UbERhTwKa6QBJQ== - -xdg-basedir@^4.0.0: - version "4.0.0" - resolved "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz" - integrity 
sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q== +ws@^8.13.0: + version "8.18.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.18.0.tgz#0d7505a6eafe2b0e712d232b42279f53bc289bbc" + integrity sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw== xdg-basedir@^5.0.1, xdg-basedir@^5.1.0: version "5.1.0" @@ -10322,7 +10522,7 @@ xml-js@^1.6.11: dependencies: sax "^1.2.4" -xtend@^4.0.0, xtend@^4.0.1, xtend@^4.0.2: +xtend@^4.0.0, xtend@^4.0.2: version "4.0.2" resolved "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== @@ -10352,6 +10552,11 @@ yocto-queue@^0.1.0: resolved "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz" integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q== +yocto-queue@^1.0.0: + version "1.1.1" + resolved "https://registry.yarnpkg.com/yocto-queue/-/yocto-queue-1.1.1.tgz#fef65ce3ac9f8a32ceac5a634f74e17e5b232110" + integrity sha512-b4JR1PFR10y1mKjhHY9LaGo6tmrgjit7hxVIeAmyMw3jegXR4dhYqLaQF5zMXZxY7tLpMyJeLjr1C4rLmkVe8g== + zwitch@^1.0.0: version "1.0.5" resolved "https://registry.npmjs.org/zwitch/-/zwitch-1.0.5.tgz" From cfe149943ec45ea7db10c01eb11e7a6852434211 Mon Sep 17 00:00:00 2001 From: Jakub Dardzinski Date: Tue, 5 Nov 2024 13:30:14 +0100 Subject: [PATCH 46/89] Add assertions for format in test events. (#3221) Signed-off-by: Jakub Dardzinski --- .pre-commit-config.yaml | 1 + .pre_commit/json-schema/test-facets.sh | 2 +- spec/tests/EnvironmentVariablesRunFacet/1.json | 16 ++++++++++++++++ spec/tests/NominalTimeRunFacet/1.json | 4 ++-- 4 files changed, 20 insertions(+), 3 deletions(-) create mode 100644 spec/tests/EnvironmentVariablesRunFacet/1.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 49ffac582f..c6756d2805 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -103,6 +103,7 @@ repos: name: Verify spec changes committed to website language: script files: ^spec/ + exclude: ".*tests.*" entry: .pre_commit/run-spec-snapshot.sh - id: pmd-client-java name: pmd-client-java diff --git a/.pre_commit/json-schema/test-facets.sh b/.pre_commit/json-schema/test-facets.sh index edbacf2612..2d0699d794 100755 --- a/.pre_commit/json-schema/test-facets.sh +++ b/.pre_commit/json-schema/test-facets.sh @@ -14,7 +14,7 @@ while [ "$1" ]; do test_events=("spec/tests/$event_type"/*.json) if [ ${#test_events[@]} -gt 0 ]; then echo "Validating $test_events against $1" - jv $1 $test_events + jv $1 $test_events --assert-format fi shift done \ No newline at end of file diff --git a/spec/tests/EnvironmentVariablesRunFacet/1.json b/spec/tests/EnvironmentVariablesRunFacet/1.json new file mode 100644 index 0000000000..87a0f7b130 --- /dev/null +++ b/spec/tests/EnvironmentVariablesRunFacet/1.json @@ -0,0 +1,16 @@ +{ + "environmentVariables": { + "_producer": "https://github.com/OpenLineage/OpenLineage", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-0/EnvironmentVariablesRunFacet.json", + "environmentVariables": [ + { + "name": "JAVA_HOME", + "value": "/usr/lib/jvm/java-11-openjdk" + }, + { + "name": "PATH", + "value": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" + } + ] + } +} diff --git a/spec/tests/NominalTimeRunFacet/1.json b/spec/tests/NominalTimeRunFacet/1.json index f6aa9771c4..eac6d06927 100644 --- a/spec/tests/NominalTimeRunFacet/1.json +++ 
b/spec/tests/NominalTimeRunFacet/1.json
@@ -2,7 +2,7 @@
   "nominalTime": {
     "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/client/python",
     "_schemaURL": "https://raw.githubusercontent.com/OpenLineage/OpenLineage/main/spec/OpenLineage.json#/definitions/NominalTimeRunFacet",
-    "nominalEndTime": "2020-01-02",
-    "nominalStartTime": "2020-01-01"
+    "nominalEndTime": "2020-01-02T04:00:00.001Z",
+    "nominalStartTime": "2020-01-01T04:00:00.001Z"
   }
 }

From 0ed2663c577595430000ed40c2b731356f76c4d2 Mon Sep 17 00:00:00 2001
From: Maciej Obuchowski
Date: Tue, 5 Nov 2024 13:35:13 +0100
Subject: [PATCH 47/89] docs: fix outdated Spark-related docs (#3215)

Signed-off-by: Maciej Obuchowski
---
 website/blog/openlineage-spark/index.mdx | 4 ++--
 website/docs/guides/spark.md | 10 ++++++----
 2 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/website/blog/openlineage-spark/index.mdx b/website/blog/openlineage-spark/index.mdx
index 44d850572c..ee121b4d8f 100644
--- a/website/blog/openlineage-spark/index.mdx
+++ b/website/blog/openlineage-spark/index.mdx
@@ -51,7 +51,7 @@ familiar with it and how it's used in Spark applications. OpenLineage integrates
 interface and collecting information about jobs that are executed inside a Spark application. To activate the listener, add the following properties to your Spark configuration:
 ```
-spark.jars.packages io.openlineage:openlineage-spark:0.3.+
+spark.jars.packages io.openlineage:openlineage-spark:1.23.0
 spark.extraListeners io.openlineage.spark.agent.OpenLineageSparkListener
 ```
 This can be added to your cluster’s `spark-defaults.conf` file, in which case it will record lineage for every job executed on the cluster, or added to specific jobs on submission via the `spark-submit` command. Once the listener is activated, it needs to know where to report lineage events, as well as the namespace of your jobs. Add the following additional configuration lines to your `spark-defaults.conf` file or your Spark submission script:
@@ -122,7 +122,7 @@ spark = (SparkSession.builder.master('local').appName('openlineage_spark_test')
     .config('spark.jars', ",".join(files))
     # Install and set up the OpenLineage listener
-    .config('spark.jars.packages', 'io.openlineage:openlineage-spark:0.3.+')
+    .config('spark.jars.packages', 'io.openlineage:openlineage-spark:1.23.0')
     .config('spark.extraListeners', 'io.openlineage.spark.agent.OpenLineageSparkListener')
     .config('spark.openlineage.transport.url', 'http://marquez-api:5000')
     .config('spark.openlineage.transport.type', 'http')
diff --git a/website/docs/guides/spark.md b/website/docs/guides/spark.md
index 65b4375567..261fc5fb10 100644
--- a/website/docs/guides/spark.md
+++ b/website/docs/guides/spark.md
@@ -13,14 +13,15 @@ This guide was developed using an **earlier version** of this integration and ma
 Adding OpenLineage to Spark is refreshingly uncomplicated, and this is thanks to Spark's SparkListener interface. OpenLineage integrates with Spark by implementing SparkListener and collecting information about jobs executed inside a Spark application.
To activate the listener, add the following properties to your Spark configuration in your cluster's `spark-defaults.conf` file or, alternatively, add them to specific jobs on submission via the `spark-submit` command: ``` -spark.jars.packages io.openlineage:openlineage-spark:0.3.+ +spark.jars.packages io.openlineage:openlineage-spark:1.23.0 spark.extraListeners io.openlineage.spark.agent.OpenLineageSparkListener ``` Once activated, the listener needs to know where to report lineage events, as well as the namespace of your jobs. Add the following additional configuration lines to your `spark-defaults.conf` file or your Spark submission script: ``` -spark.openlineage.host {your.openlineage.host} +spark.openlineage.transport.url {your.openlineage.host} +spark.openlineage.transport.type {your.openlineage.transport.type} spark.openlineage.namespace {your.openlineage.namespace} ``` @@ -90,9 +91,10 @@ spark = (SparkSession.builder.master('local').appName('openlineage_spark_test') .config('spark.jars', ",".join(files)) # Install and set up the OpenLineage listener - .config('spark.jars.packages', 'io.openlineage:openlineage-spark:0.3.+') + .config('spark.jars.packages', 'io.openlineage:openlineage-spark:1.23.0') .config('spark.extraListeners', 'io.openlineage.spark.agent.OpenLineageSparkListener') - .config('spark.openlineage.host', 'http://marquez-api:5000') + .config('spark.openlineage.transport.url', 'http://marquez-api:5000') + .config('spark.openlineage.transport.type', 'http') .config('spark.openlineage.namespace', 'spark_integration') # Configure the Google credentials and project id From 22f8ecc9cc7abf28eef900ab17d2ee93f68d6bc0 Mon Sep 17 00:00:00 2001 From: Pahulpreet Singh <54016648+codelixir@users.noreply.github.com> Date: Tue, 5 Nov 2024 19:34:34 +0530 Subject: [PATCH 48/89] spark: Update Dataproc run facet to include runType property (#3167) * Update Dataproc run facet to include runType property Signed-off-by: Pahulpreet Singh * rename gcp dataproc spark run facet Signed-off-by: Pahulpreet Singh * rename runType to jobType in gcp dataproc run facet Signed-off-by: Pahulpreet Singh * update expected facets in GCPUtilsTest Signed-off-by: Pahulpreet Singh * rename gcp dataproc spark run facet Update gcp/dataproc/registry.json Signed-off-by: Pahulpreet Singh * rename gcp dataproc spark run facet Update gcp/lineage/registry.json Signed-off-by: Pahulpreet Singh --------- Signed-off-by: Pahulpreet Singh --- .../agent/facets/builder/GcpRunFacetBuilder.java | 2 +- .../openlineage/spark/agent/util/GCPUtils.java | 3 +++ .../spark/agent/util/GCPUtilsTest.java | 3 +++ ...arkRunFacet.json => GcpDataprocRunFacet.json} | 16 ++++++++++------ spec/registry/gcp/dataproc/registry.json | 2 +- spec/registry/gcp/lineage/registry.json | 2 +- 6 files changed, 19 insertions(+), 9 deletions(-) rename spec/registry/gcp/dataproc/facets/{GcpDataprocSparkRunFacet.json => GcpDataprocRunFacet.json} (78%) diff --git a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/facets/builder/GcpRunFacetBuilder.java b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/facets/builder/GcpRunFacetBuilder.java index 46c7baa87c..a17e8df1b6 100644 --- a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/facets/builder/GcpRunFacetBuilder.java +++ b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/facets/builder/GcpRunFacetBuilder.java @@ -38,7 +38,7 @@ public GcpRunFacetBuilder(SparkContext sparkContext) { @Override protected void build(SparkListenerEvent event, BiConsumer 
consumer) { if (GCPUtils.isDataprocRuntime()) - consumer.accept("gcp_dataproc_spark", new GcpDataprocRunFacet(getDataprocAttributes())); + consumer.accept("gcp_dataproc", new GcpDataprocRunFacet(getDataprocAttributes())); } private Map getDataprocAttributes() { diff --git a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/GCPUtils.java b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/GCPUtils.java index ce5996db16..f3072783b7 100644 --- a/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/GCPUtils.java +++ b/integration/spark/shared/src/main/java/io/openlineage/spark/agent/util/GCPUtils.java @@ -86,14 +86,17 @@ public static Map getDataprocRunFacetMap(SparkContext context) { getClusterUUID(context).ifPresent(p -> dataprocProperties.put("clusterUuid", p)); getDataprocJobID(context).ifPresent(p -> dataprocProperties.put("jobId", p)); getDataprocJobUUID(context).ifPresent(p -> dataprocProperties.put("jobUuid", p)); + dataprocProperties.put("jobType", "dataproc_job"); break; case BATCH: getDataprocBatchID(context).ifPresent(p -> dataprocProperties.put("batchId", p)); getDataprocBatchUUID(context).ifPresent(p -> dataprocProperties.put("batchUuid", p)); + dataprocProperties.put("jobType", "batch"); break; case INTERACTIVE: getDataprocSessionID(context).ifPresent(p -> dataprocProperties.put("sessionId", p)); getDataprocSessionUUID(context).ifPresent(p -> dataprocProperties.put("sessionUuid", p)); + dataprocProperties.put("jobType", "session"); break; case UNKNOWN: // do nothing diff --git a/integration/spark/shared/src/test/java/io/openlineage/spark/agent/util/GCPUtilsTest.java b/integration/spark/shared/src/test/java/io/openlineage/spark/agent/util/GCPUtilsTest.java index d8e24d193a..7d3eb36e9c 100644 --- a/integration/spark/shared/src/test/java/io/openlineage/spark/agent/util/GCPUtilsTest.java +++ b/integration/spark/shared/src/test/java/io/openlineage/spark/agent/util/GCPUtilsTest.java @@ -163,17 +163,20 @@ void testForDataprocSession() { EXPECTED_FACET_DATAPROC_CLUSTER.put("appName", TEST_APP_NAME); EXPECTED_FACET_DATAPROC_CLUSTER.put("appId", TEST_APP_ID); EXPECTED_FACET_DATAPROC_CLUSTER.put("projectId", TEST_PROJECT_ID); + EXPECTED_FACET_DATAPROC_CLUSTER.put("jobType", "dataproc_job"); EXPECTED_FACET_DATAPROC_BATCH.put("batchUuid", TEST_RESOURCE_UUID); EXPECTED_FACET_DATAPROC_BATCH.put("batchId", TEST_BATCH_ID); EXPECTED_FACET_DATAPROC_BATCH.put("appName", TEST_APP_NAME); EXPECTED_FACET_DATAPROC_BATCH.put("appId", TEST_APP_ID); EXPECTED_FACET_DATAPROC_BATCH.put("projectId", TEST_PROJECT_ID); + EXPECTED_FACET_DATAPROC_BATCH.put("jobType", "batch"); EXPECTED_FACET_DATAPROC_SESSION.put("sessionUuid", TEST_RESOURCE_UUID); EXPECTED_FACET_DATAPROC_SESSION.put("sessionId", TEST_SESSION_ID); EXPECTED_FACET_DATAPROC_SESSION.put("appName", TEST_APP_NAME); EXPECTED_FACET_DATAPROC_SESSION.put("appId", TEST_APP_ID); EXPECTED_FACET_DATAPROC_SESSION.put("projectId", TEST_PROJECT_ID); + EXPECTED_FACET_DATAPROC_SESSION.put("jobType", "session"); } } diff --git a/spec/registry/gcp/dataproc/facets/GcpDataprocSparkRunFacet.json b/spec/registry/gcp/dataproc/facets/GcpDataprocRunFacet.json similarity index 78% rename from spec/registry/gcp/dataproc/facets/GcpDataprocSparkRunFacet.json rename to spec/registry/gcp/dataproc/facets/GcpDataprocRunFacet.json index 81bf9d77f7..8ad3d95896 100644 --- a/spec/registry/gcp/dataproc/facets/GcpDataprocSparkRunFacet.json +++ b/spec/registry/gcp/dataproc/facets/GcpDataprocRunFacet.json @@ -1,8 +1,8 @@ { 
"$schema": "https://json-schema.org/draft/2020-12/schema", - "$id": "https://openlineage.io/spec/facets/1-0-0/GcpDataprocSparkRunFacet.json", + "$id": "https://openlineage.io/spec/facets/1-0-0/GcpDataprocRunFacet.json", "$defs": { - "GcpDataprocSparkRunFacet": { + "GcpDataprocRunFacet": { "allOf": [ { "$ref": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunFacet" @@ -11,11 +11,11 @@ "type": "object", "properties": { "appId": { - "description": "Application ID set in the spark configuration of the current context. Its format depends on the resource manager.", + "description": "Application ID set by the resource manager. For spark jobs, it is set in the spark configuration of the current context.", "type": "string" }, "appName": { - "description": "App name set in the spark configuration of the current context. It may be provided by the user.", + "description": "App name which may be provided by the user, or some default is used by the resource manager. For spark jobs, it is set in the spark configuration of the current context.", "type": "string" }, "batchId": { @@ -50,6 +50,10 @@ "description": "The name of the query node in the executed Spark Plan. Often used to describe the command being executed.", "type": "string" }, + "jobType": { + "description": "Identifies whether the process is a job (on a Dataproc cluster), a batch or a session.", + "type": "string" + }, "sessionId": { "description": "Populated only for Dataproc serverless interactive sessions. The resource id of the session, used for URL generation.", "type": "string" @@ -68,8 +72,8 @@ }, "type": "object", "properties": { - "gcp_dataproc_spark": { - "$ref": "#/$defs/GcpDataprocSparkRunFacet" + "gcp_dataproc": { + "$ref": "#/$defs/GcpDataprocRunFacet" } } } diff --git a/spec/registry/gcp/dataproc/registry.json b/spec/registry/gcp/dataproc/registry.json index cabd2d89b5..0e40aa10dc 100644 --- a/spec/registry/gcp/dataproc/registry.json +++ b/spec/registry/gcp/dataproc/registry.json @@ -1,6 +1,6 @@ { "producer": { "root_doc_URL": "https://cloud.google.com/dataproc/docs/guides/lineage", - "produced_facets": ["ol:gcp:dataproc:GcpDataprocSparkRunFacet.json"] + "produced_facets": ["ol:gcp:dataproc:GcpDataprocRunFacet.json"] } } diff --git a/spec/registry/gcp/lineage/registry.json b/spec/registry/gcp/lineage/registry.json index 2fb625611e..ba49179ea0 100644 --- a/spec/registry/gcp/lineage/registry.json +++ b/spec/registry/gcp/lineage/registry.json @@ -1,6 +1,6 @@ { "consumer": { "root_doc_URL": "https://cloud.google.com/data-catalog/docs/reference/data-lineage/rpc/google.cloud.datacatalog.lineage.v1#google.cloud.datacatalog.lineage.v1.Lineage.ProcessOpenLineageRunEvent", - "consumed_facets": ["ol:gcp:lineage:GcpLineageJobFacet.json", "ol:gcp:dataproc:GcpDataprocSparkRunFacet.json"] + "consumed_facets": ["ol:gcp:lineage:GcpLineageJobFacet.json", "ol:gcp:dataproc:GcpDataprocRunFacet.json"] } } From 46db181e0f5cd7d44bbf471698284beb9ac912e5 Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Tue, 5 Nov 2024 21:07:25 +0100 Subject: [PATCH 49/89] Move kinesis to separate module, move http transport to httpclient5 (#3205) Signed-off-by: Maciej Obuchowski --- .circleci/continue_config.yml | 1 - client/java/build.gradle | 4 +- client/java/settings.gradle | 1 + .../client/transports/FacetsConfig.java | 7 +- .../client/transports/FileTransport.java | 15 +- .../client/transports/HttpTransport.java | 101 +++++---- .../client/transports/Transport.java | 1 - ...lineage.client.transports.TransportBuilder | 1 - 
.../client/transports/FileTransportTest.java | 39 +++- .../client/transports/HttpTransportTest.java | 193 +++++++----------- client/java/transports-kinesis/README.md | 94 +++++++++ client/java/transports-kinesis/build.gradle | 29 +++ .../transports/kinesis}/KinesisConfig.java | 3 +- .../transports/kinesis}/KinesisTransport.java | 6 +- .../kinesis}/KinesisTransportBuilder.java | 6 +- ...lineage.client.transports.TransportBuilder | 1 + .../kinesis}/KinesisTransportTest.java | 43 +++- client/java/transports-s3/build.gradle | 2 +- integration/flink/app/build.gradle | 25 +-- .../flink/FlinkContainerUtils.java | 4 +- integration/flink/build.gradle | 42 +++- .../flink/examples/stateful/build.gradle | 25 ++- .../flink/examples/stateful/gradle.properties | 2 + .../gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 59536 bytes .../gradle/wrapper/gradle-wrapper.properties | 5 + integration/flink/examples/stateful/gradlew | 185 +++++++++++++++++ .../flink/FlinkStoppableApplication.java | 4 +- integration/flink/settings.gradle | 1 - integration/flink/shared/build.gradle | 2 +- .../spark/agent/ArgumentParserTest.java | 24 --- integration/spark/build.gradle | 2 +- integration/spark/buildDependencies.sh | 4 +- .../client/java/partials/java_transport.md | 88 -------- 33 files changed, 612 insertions(+), 348 deletions(-) create mode 100644 client/java/transports-kinesis/README.md create mode 100644 client/java/transports-kinesis/build.gradle rename client/java/{src/main/java/io/openlineage/client/transports => transports-kinesis/src/main/java/io/openlineage/client/transports/kinesis}/KinesisConfig.java (92%) rename client/java/{src/main/java/io/openlineage/client/transports => transports-kinesis/src/main/java/io/openlineage/client/transports/kinesis}/KinesisTransport.java (97%) rename client/java/{src/main/java/io/openlineage/client/transports => transports-kinesis/src/main/java/io/openlineage/client/transports/kinesis}/KinesisTransportBuilder.java (67%) create mode 100644 client/java/transports-kinesis/src/main/resources/META-INF.services/io.openlineage.client.transports.TransportBuilder rename client/java/{src/test/java/io/openlineage/client/transports => transports-kinesis/src/test/java/io/openlineage/client/transports/kinesis}/KinesisTransportTest.java (74%) create mode 100644 integration/flink/examples/stateful/gradle/wrapper/gradle-wrapper.jar create mode 100644 integration/flink/examples/stateful/gradle/wrapper/gradle-wrapper.properties create mode 100755 integration/flink/examples/stateful/gradlew diff --git a/.circleci/continue_config.yml b/.circleci/continue_config.yml index da5781b790..86281da893 100644 --- a/.circleci/continue_config.yml +++ b/.circleci/continue_config.yml @@ -982,7 +982,6 @@ jobs: at: ~/ - set_java_version - run: chmod -R 777 data/iceberg/db - - run: ./gradlew --console=plain examples:stateful:build -Pflink.version=<< parameters.flink-version >> - run: ./gradlew --no-daemon --console=plain integrationTest --i -Pflink.version=<< parameters.flink-version >> - run: when: on_fail diff --git a/client/java/build.gradle b/client/java/build.gradle index 676234b6b6..caed893dcd 100644 --- a/client/java/build.gradle +++ b/client/java/build.gradle @@ -67,14 +67,14 @@ dependencies { implementation "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:${jacksonVersion}" implementation "com.fasterxml.jackson.datatype:jackson-datatype-jdk8:${jacksonVersion}" implementation "com.fasterxml.jackson.datatype:jackson-datatype-jsr310:${jacksonVersion}" - implementation 
'org.apache.httpcomponents:httpclient:4.5.14' + implementation 'org.apache.commons:commons-lang3:3.17.0' + implementation 'org.apache.httpcomponents.client5:httpclient5:5.4.1' implementation 'commons-logging:commons-logging:1.3.4' implementation 'org.slf4j:slf4j-api:1.7.36' implementation "io.micrometer:micrometer-core:${micrometerVersion}" compileOnly 'com.google.code.findbugs:jsr305:3.0.2' compileOnly 'org.apache.kafka:kafka-clients:3.8.1' - compileOnly 'com.amazonaws:amazon-kinesis-producer:0.15.12' compileOnly "org.projectlombok:lombok:${lombokVersion}" compileOnly "io.micrometer:micrometer-registry-statsd:${micrometerVersion}" annotationProcessor "org.projectlombok:lombok:${lombokVersion}" diff --git a/client/java/settings.gradle b/client/java/settings.gradle index 20ce2cf992..5a325e357a 100644 --- a/client/java/settings.gradle +++ b/client/java/settings.gradle @@ -8,6 +8,7 @@ rootProject.name = 'openlineage-java' include('generator') include('transports-gcplineage') include('transports-gcs') +include('transports-kinesis') include('transports-s3') buildCache { diff --git a/client/java/src/main/java/io/openlineage/client/transports/FacetsConfig.java b/client/java/src/main/java/io/openlineage/client/transports/FacetsConfig.java index a71f7cc9f5..dd2f388e13 100644 --- a/client/java/src/main/java/io/openlineage/client/transports/FacetsConfig.java +++ b/client/java/src/main/java/io/openlineage/client/transports/FacetsConfig.java @@ -7,7 +7,6 @@ import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonProperty; -import com.google.common.annotations.VisibleForTesting; import io.openlineage.client.MergeConfig; import java.util.AbstractMap; import java.util.Collections; @@ -77,8 +76,7 @@ public void onOtherProperty(String key, Object value) { * separated.by.dots and one prefix can include many disabled facets. For this reason this * function can return a list of disabled facet properties. */ - @VisibleForTesting - static List asDisabledFacetProperties(String key, Object value) { + public static List asDisabledFacetProperties(String key, Object value) { /* The algorithm: - Flatten {prefix: {facet: {field: value, disabled: value}}, to to {prefix.facet.field: value, prefix.facet.disabled: value} @@ -160,8 +158,7 @@ public FacetsConfig mergeWithNonNull(FacetsConfig facetsConfig) { } @Data - @VisibleForTesting - static class DisabledFacetProperty { + public static class DisabledFacetProperty { private final String facetName; private final boolean disabled; } diff --git a/client/java/src/main/java/io/openlineage/client/transports/FileTransport.java b/client/java/src/main/java/io/openlineage/client/transports/FileTransport.java index 53b4c6b175..993774dbdf 100644 --- a/client/java/src/main/java/io/openlineage/client/transports/FileTransport.java +++ b/client/java/src/main/java/io/openlineage/client/transports/FileTransport.java @@ -10,9 +10,10 @@ import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.StandardOpenOption; import lombok.NonNull; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.io.FileUtils; /** * Appends Openlineage events to a file specified. 
Events are separated by a newline character, @@ -46,11 +47,13 @@ public void emit(@NonNull OpenLineage.JobEvent jobEvent) { private void emit(String eventAsJson) { try { - FileUtils.writeStringToFile( - file, - eventAsJson.replace(System.lineSeparator(), "") + System.lineSeparator(), - StandardCharsets.UTF_8, - true); + Files.createDirectories(file.getParentFile().toPath()); + Files.write( + file.toPath(), + (eventAsJson.replace(System.lineSeparator(), "") + System.lineSeparator()) + .getBytes(StandardCharsets.UTF_8), + StandardOpenOption.CREATE, + StandardOpenOption.APPEND); log.debug("emitted event: " + eventAsJson); } catch (IOException | IllegalArgumentException e) { log.error("Writing event to a file {} failed: {}", file.getPath(), e); diff --git a/client/java/src/main/java/io/openlineage/client/transports/HttpTransport.java b/client/java/src/main/java/io/openlineage/client/transports/HttpTransport.java index e26666d92d..8010aecd90 100644 --- a/client/java/src/main/java/io/openlineage/client/transports/HttpTransport.java +++ b/client/java/src/main/java/io/openlineage/client/transports/HttpTransport.java @@ -5,11 +5,11 @@ package io.openlineage.client.transports; -import static org.apache.http.Consts.UTF_8; -import static org.apache.http.HttpHeaders.ACCEPT; -import static org.apache.http.HttpHeaders.AUTHORIZATION; -import static org.apache.http.HttpHeaders.CONTENT_TYPE; -import static org.apache.http.entity.ContentType.APPLICATION_JSON; +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.apache.hc.core5.http.ContentType.APPLICATION_JSON; +import static org.apache.hc.core5.http.HttpHeaders.ACCEPT; +import static org.apache.hc.core5.http.HttpHeaders.AUTHORIZATION; +import static org.apache.hc.core5.http.HttpHeaders.CONTENT_TYPE; import io.openlineage.client.OpenLineage; import io.openlineage.client.OpenLineageClientException; @@ -25,17 +25,24 @@ import lombok.experimental.Delegate; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import org.apache.http.HttpEntity; -import org.apache.http.HttpResponse; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.entity.GzipCompressingEntity; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.utils.URIBuilder; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.http.util.EntityUtils; +import org.apache.hc.client5.http.config.ConnectionConfig; +import org.apache.hc.client5.http.config.RequestConfig; +import org.apache.hc.client5.http.entity.GzipCompressingEntity; +import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import org.apache.hc.client5.http.impl.classic.HttpClientBuilder; +import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManager; +import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManagerBuilder; +import org.apache.hc.core5.http.ClassicHttpResponse; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.ParseException; +import org.apache.hc.core5.http.io.SocketConfig; +import org.apache.hc.core5.http.io.entity.EntityUtils; +import org.apache.hc.core5.http.io.entity.StringEntity; +import org.apache.hc.core5.http.io.support.ClassicRequestBuilder; +import org.apache.hc.core5.net.URIBuilder; +import org.apache.hc.core5.pool.PoolConcurrencyPolicy; +import 
org.apache.hc.core5.pool.PoolReusePolicy; +import org.apache.hc.core5.util.Timeout; @Slf4j public final class HttpTransport extends Transport { @@ -63,14 +70,30 @@ private static CloseableHttpClient withTimeout(HttpConfig httpConfig) { // default one timeoutMs = 5000; } + Timeout timeout = Timeout.ofMilliseconds(timeoutMs); + + PoolingHttpClientConnectionManager connectionManager = + PoolingHttpClientConnectionManagerBuilder.create() + .setDefaultSocketConfig(SocketConfig.custom().setSoTimeout(timeout).build()) + .setPoolConcurrencyPolicy(PoolConcurrencyPolicy.STRICT) + .setConnPoolPolicy(PoolReusePolicy.LIFO) + .setDefaultConnectionConfig( + ConnectionConfig.custom() + .setSocketTimeout(timeout) + .setConnectTimeout(timeout) + .setTimeToLive(timeout) + .build()) + .build(); - RequestConfig config = + RequestConfig requestConfig = RequestConfig.custom() - .setConnectTimeout(timeoutMs) - .setConnectionRequestTimeout(timeoutMs) - .setSocketTimeout(timeoutMs) + .setConnectionRequestTimeout(timeout) + .setResponseTimeout(timeout) .build(); - return HttpClientBuilder.create().setDefaultRequestConfig(config).build(); + return HttpClientBuilder.create() + .setConnectionManager(connectionManager) + .setDefaultRequestConfig(requestConfig) + .build(); } public HttpTransport( @@ -130,21 +153,22 @@ public void emit(@NonNull OpenLineage.JobEvent jobEvent) { private void emit(String eventAsJson) { log.debug("POST event on URL {}", uri); try { - final HttpPost request = new HttpPost(); - request.setURI(uri); + ClassicRequestBuilder request = ClassicRequestBuilder.post(uri); setHeaders(request); setBody(request, eventAsJson); - try (CloseableHttpResponse response = http.execute(request)) { - throwOnHttpError(response); - EntityUtils.consume(response.getEntity()); - } + http.execute( + request.build(), + response -> { + throwOnHttpError(response); + return null; + }); } catch (IOException e) { throw new OpenLineageClientException(e); } } - private void setBody(HttpPost request, String body) { + private void setBody(ClassicRequestBuilder request, String body) { HttpEntity entity = new StringEntity(body, APPLICATION_JSON); if (compression == HttpConfig.Compression.GZIP) { entity = new GzipCompressingEntity(entity); @@ -152,22 +176,25 @@ private void setBody(HttpPost request, String body) { request.setEntity(entity); } - private void setHeaders(HttpPost request) { + private void setHeaders(ClassicRequestBuilder request) { + this.headers.forEach((key, value) -> request.setHeader(key, value)); // set headers to accept json - headers.put(ACCEPT, APPLICATION_JSON.toString()); - headers.put(CONTENT_TYPE, APPLICATION_JSON.toString()); + request.setHeader(ACCEPT, APPLICATION_JSON.toString()); + request.setHeader(CONTENT_TYPE, APPLICATION_JSON.toString()); // if tokenProvider preset overwrite authorization if (tokenProvider != null) { - headers.put(AUTHORIZATION, tokenProvider.getToken()); + request.addHeader(AUTHORIZATION, tokenProvider.getToken()); } - headers.forEach(request::addHeader); } - private void throwOnHttpError(@NonNull HttpResponse response) throws IOException { - final int code = response.getStatusLine().getStatusCode(); + private void throwOnHttpError(@NonNull ClassicHttpResponse response) + throws IOException, ParseException { + final int code = response.getCode(); + HttpEntity entity = response.getEntity(); + String body = EntityUtils.toString(entity, UTF_8); + EntityUtils.consume(entity); if (code >= 400 && code < 600) { // non-2xx - throw new HttpTransportResponseException( - code, 
EntityUtils.toString(response.getEntity(), UTF_8)); + throw new HttpTransportResponseException(code, body); } } diff --git a/client/java/src/main/java/io/openlineage/client/transports/Transport.java b/client/java/src/main/java/io/openlineage/client/transports/Transport.java index 59485df792..0f3596036d 100644 --- a/client/java/src/main/java/io/openlineage/client/transports/Transport.java +++ b/client/java/src/main/java/io/openlineage/client/transports/Transport.java @@ -16,7 +16,6 @@ enum Type { FILE, HTTP, KAFKA, - KINESIS, NOOP }; diff --git a/client/java/src/main/resources/META-INF/services/io.openlineage.client.transports.TransportBuilder b/client/java/src/main/resources/META-INF/services/io.openlineage.client.transports.TransportBuilder index 832a4f56ec..de3b166593 100644 --- a/client/java/src/main/resources/META-INF/services/io.openlineage.client.transports.TransportBuilder +++ b/client/java/src/main/resources/META-INF/services/io.openlineage.client.transports.TransportBuilder @@ -2,5 +2,4 @@ io.openlineage.client.transports.HttpTransportBuilder io.openlineage.client.transports.KafkaTransportBuilder io.openlineage.client.transports.ConsoleTransportBuilder io.openlineage.client.transports.FileTransportBuilder -io.openlineage.client.transports.KinesisTransportBuilder io.openlineage.client.transports.CompositeTransportBuilder \ No newline at end of file diff --git a/client/java/src/test/java/io/openlineage/client/transports/FileTransportTest.java b/client/java/src/test/java/io/openlineage/client/transports/FileTransportTest.java index 3683b1d5ba..6b50989c34 100644 --- a/client/java/src/test/java/io/openlineage/client/transports/FileTransportTest.java +++ b/client/java/src/test/java/io/openlineage/client/transports/FileTransportTest.java @@ -14,10 +14,12 @@ import io.openlineage.client.OpenLineage; import io.openlineage.client.OpenLineageClientUtils; import java.io.File; +import java.io.IOException; import java.net.URI; +import java.nio.file.Files; +import java.nio.file.Paths; import java.util.List; import lombok.SneakyThrows; -import org.apache.commons.io.FileUtils; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -32,13 +34,34 @@ class FileTransportTest { @BeforeEach @SneakyThrows public void beforeEach() { - FileUtils.deleteDirectory(new File(FILE_LOCATION_DIR)); + File dir = new File(FILE_LOCATION_DIR); + if (dir.exists()) { + deleteDirectory(dir); + } fileConfig = new FileConfig(); fileConfig.setLocation(FILE_LOCATION); transport = new FileTransport(fileConfig); } + private void deleteDirectory(File dir) throws IOException { + File[] files = dir.listFiles(); + if (files != null) { + for (File file : files) { + if (file.isDirectory()) { + deleteDirectory(file); + } else { + if (!file.delete()) { + throw new IOException("Failed to delete file: " + file); + } + } + } + } + if (!dir.delete()) { + throw new IOException("Failed to delete directory: " + dir); + } + } + @Test @SneakyThrows void transportEmitsRunEvent() { @@ -47,7 +70,7 @@ void transportEmitsRunEvent() { transport.emit(event); - List lines = FileUtils.readLines(new File(FILE_LOCATION)); + List lines = Files.readAllLines(Paths.get(FILE_LOCATION)); assertThat(lines.size()).isEqualTo(1); assertThat(lines.get(0)).isEqualTo(eventSerialized); @@ -61,7 +84,7 @@ void transportEmitsDatasetEvent() { transport.emit(event); - List lines = FileUtils.readLines(new File(FILE_LOCATION)); + List lines = Files.readAllLines(Paths.get(FILE_LOCATION)); assertThat(lines.size()).isEqualTo(1); 
assertThat(lines.get(0)).isEqualTo(eventSerialized); @@ -75,7 +98,7 @@ void transportEmitsJobEvent() { transport.emit(event); - List lines = FileUtils.readLines(new File(FILE_LOCATION)); + List lines = Files.readAllLines(Paths.get(FILE_LOCATION)); assertThat(lines.size()).isEqualTo(1); assertThat(lines.get(0)).isEqualTo(eventSerialized); @@ -96,7 +119,7 @@ void transportCannotAppendToFileWhenFileNotWriteable() { // should not be written transport.emit(runEvent()); - assertThat(FileUtils.readLines(new File(FILE_LOCATION)).size()).isEqualTo(1); + assertThat(Files.readAllLines(Paths.get(FILE_LOCATION)).size()).isEqualTo(1); } @Test @@ -108,7 +131,7 @@ void multipleEventsAreSeparatedByNewline() { transport.emit(event); transport.emit(anotherEvent); - List lines = FileUtils.readLines(new File(FILE_LOCATION)); + List lines = Files.readAllLines(Paths.get(FILE_LOCATION)); String eventSerialized = OpenLineageClientUtils.toJson(event); String anotherEventSerialized = OpenLineageClientUtils.toJson(anotherEvent); @@ -128,7 +151,7 @@ void newlinesAreRemovedFromWrittenEvents() { .build(); transport.emit(event); - List lines = FileUtils.readLines(new File(FILE_LOCATION)); + List lines = Files.readAllLines(Paths.get(FILE_LOCATION)); assertThat(lines.size()).isEqualTo(1); assertThat(lines.get(0)) diff --git a/client/java/src/test/java/io/openlineage/client/transports/HttpTransportTest.java b/client/java/src/test/java/io/openlineage/client/transports/HttpTransportTest.java index 302f5e57d8..08f284406f 100644 --- a/client/java/src/test/java/io/openlineage/client/transports/HttpTransportTest.java +++ b/client/java/src/test/java/io/openlineage/client/transports/HttpTransportTest.java @@ -9,16 +9,15 @@ import static io.openlineage.client.Events.jobEvent; import static io.openlineage.client.Events.runEvent; import static java.util.Collections.singletonMap; -import static org.apache.http.HttpHeaders.ACCEPT; -import static org.apache.http.HttpHeaders.CONTENT_TYPE; -import static org.apache.http.entity.ContentType.APPLICATION_JSON; +import static org.apache.hc.core5.http.ContentType.APPLICATION_JSON; +import static org.apache.hc.core5.http.HttpHeaders.ACCEPT; +import static org.apache.hc.core5.http.HttpHeaders.CONTENT_TYPE; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.RETURNS_DEEP_STUBS; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.mockStatic; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -36,20 +35,20 @@ import java.util.Map; import java.util.stream.Collectors; import java.util.zip.GZIPInputStream; -import org.apache.http.HttpEntity; -import org.apache.http.NameValuePair; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.config.RequestConfig.Builder; -import org.apache.http.client.entity.GzipCompressingEntity; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpUriRequest; -import org.apache.http.entity.*; -import org.apache.http.impl.client.CloseableHttpClient; +import lombok.SneakyThrows; +import org.apache.hc.client5.http.entity.GzipCompressingEntity; +import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import 
org.apache.hc.client5.http.impl.classic.CloseableHttpResponse; +import org.apache.hc.core5.http.ClassicHttpRequest; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.NameValuePair; +import org.apache.hc.core5.http.io.HttpClientResponseHandler; +import org.apache.hc.core5.http.io.entity.StringEntity; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; -import org.mockito.MockedStatic; +@SuppressWarnings("unchecked") class HttpTransportTest { @Test @@ -81,13 +80,14 @@ void clientEmitsHttpTransport() throws IOException { OpenLineageClient client = new OpenLineageClient(transport); CloseableHttpResponse response = mock(CloseableHttpResponse.class, RETURNS_DEEP_STUBS); - when(response.getStatusLine().getStatusCode()).thenReturn(200); + when(response.getCode()).thenReturn(200); - when(http.execute(any(HttpUriRequest.class))).thenReturn(response); + when(http.execute(any(ClassicHttpRequest.class), any(HttpClientResponseHandler.class))) + .thenReturn(response); client.emit(runEvent()); - verify(http, times(1)).execute(any()); + verify(http, times(1)).execute(any(), any(HttpClientResponseHandler.class)); } @Test @@ -100,6 +100,7 @@ void httpTransportRaisesOnBothUriAndEndpoint() throws IOException { } @Test + @SneakyThrows void httpTransportDefaultEndpoint() throws IOException { CloseableHttpClient http = mock(CloseableHttpClient.class); HttpConfig config = new HttpConfig(); @@ -107,22 +108,24 @@ void httpTransportDefaultEndpoint() throws IOException { Transport transport = new HttpTransport(http, config); OpenLineageClient client = new OpenLineageClient(transport); - ArgumentCaptor captor = ArgumentCaptor.forClass(HttpUriRequest.class); + ArgumentCaptor captor = ArgumentCaptor.forClass(ClassicHttpRequest.class); CloseableHttpResponse response = mock(CloseableHttpResponse.class, RETURNS_DEEP_STUBS); - when(response.getStatusLine().getStatusCode()).thenReturn(200); - when(http.execute(any(HttpUriRequest.class))).thenReturn(response); + when(response.getCode()).thenReturn(200); + when(http.execute(any(ClassicHttpRequest.class), any(HttpClientResponseHandler.class))) + .thenReturn(response); client.emit(runEvent()); - verify(http, times(1)).execute(captor.capture()); + verify(http, times(1)).execute(captor.capture(), any(HttpClientResponseHandler.class)); - assertThat(captor.getValue().getURI()) + assertThat(captor.getValue().getUri()) .isEqualTo(URI.create("https://localhost:1500/api/v1/lineage")); } @Test + @SneakyThrows void httpTransportAcceptsExplicitEndpoint() throws IOException { CloseableHttpClient http = mock(CloseableHttpClient.class); HttpConfig config = new HttpConfig(); @@ -131,18 +134,19 @@ void httpTransportAcceptsExplicitEndpoint() throws IOException { Transport transport = new HttpTransport(http, config); OpenLineageClient client = new OpenLineageClient(transport); - ArgumentCaptor captor = ArgumentCaptor.forClass(HttpUriRequest.class); + ArgumentCaptor captor = ArgumentCaptor.forClass(ClassicHttpRequest.class); CloseableHttpResponse response = mock(CloseableHttpResponse.class, RETURNS_DEEP_STUBS); - when(response.getStatusLine().getStatusCode()).thenReturn(200); - when(http.execute(any(HttpUriRequest.class))).thenReturn(response); + when(response.getCode()).thenReturn(200); + when(http.execute(any(ClassicHttpRequest.class), any(HttpClientResponseHandler.class))) + .thenReturn(response); client.emit(runEvent()); - verify(http, times(1)).execute(captor.capture()); + verify(http, 
times(1)).execute(captor.capture(), any(HttpClientResponseHandler.class)); - assertThat(captor.getValue().getURI()).isEqualTo(URI.create("https://localhost:1500/")); + assertThat(captor.getValue().getUri()).isEqualTo(URI.create("https://localhost:1500/")); } @Test @@ -154,10 +158,11 @@ void httpTransportRaisesOn500() throws IOException { OpenLineageClient client = new OpenLineageClient(transport); CloseableHttpResponse response = mock(CloseableHttpResponse.class, RETURNS_DEEP_STUBS); - when(response.getStatusLine().getStatusCode()).thenReturn(500); + when(response.getCode()).thenReturn(500); when(response.getEntity()).thenReturn(new StringEntity("whoops!", ContentType.TEXT_PLAIN)); - when(http.execute(any(HttpUriRequest.class))).thenReturn(response); + when(http.execute(any(ClassicHttpRequest.class), any(HttpClientResponseHandler.class))) + .thenThrow(new HttpTransportResponseException(500, "whoops!")); HttpTransportResponseException thrown = assertThrows(HttpTransportResponseException.class, () -> client.emit(runEvent())); @@ -166,75 +171,63 @@ void httpTransportRaisesOn500() throws IOException { assertThat(thrown.getMessage()).contains("500"); assertThat(thrown.getMessage()).contains("whoops!"); - verify(http, times(1)).execute(any()); + verify(http, times(1)).execute(any(), any(HttpClientResponseHandler.class)); } @Test void httpTransportRaisesOnConnectionFail() throws IOException { CloseableHttpClient http = mock(CloseableHttpClient.class); - Transport transport = HttpTransport.builder().uri("http://localhost:1500").http(http).build(); + HttpConfig config = new HttpConfig(); + config.setUrl(URI.create("http://localhost:1500")); + Transport transport = new HttpTransport(http, config); OpenLineageClient client = new OpenLineageClient(transport); - when(http.execute(any(HttpUriRequest.class))).thenThrow(new IOException("")); + when(http.execute(any(ClassicHttpRequest.class), any(HttpClientResponseHandler.class))) + .thenThrow(new IOException("Connection failed")); assertThrows(OpenLineageClientException.class, () -> client.emit(runEvent())); - verify(http, times(1)).execute(any()); + verify(http, times(1)).execute(any(), any(HttpClientResponseHandler.class)); } @Test void httpTransportBuilderRaisesOnBadUri() throws IOException { CloseableHttpClient http = mock(CloseableHttpClient.class); - HttpTransport.Builder builder = HttpTransport.builder().http(http); - assertThrows(OpenLineageClientException.class, () -> builder.uri("!http://localhost:1500!")); + HttpConfig config = new HttpConfig(); + config.setUrl(URI.create("https://localhost:1500/api/v1/lineage")); + config.setEndpoint("/"); + assertThrows(OpenLineageClientException.class, () -> new HttpTransport(http, config)); } @Test + @SneakyThrows void httpTransportSendsAuthAndQueryParams() throws IOException { CloseableHttpClient http = mock(CloseableHttpClient.class); - Transport transport = - HttpTransport.builder() - .uri("http://localhost:1500", singletonMap("param", "value")) - .http(http) - .apiKey("apiKey") - .build(); + HttpConfig config = new HttpConfig(); + config.setUrl(URI.create("https://localhost:1500")); + config.setUrlParams(singletonMap("param", "value")); + ApiKeyTokenProvider auth = new ApiKeyTokenProvider(); + auth.setApiKey("apiKey"); + config.setAuth(auth); + Transport transport = new HttpTransport(http, config); OpenLineageClient client = new OpenLineageClient(transport); CloseableHttpResponse response = mock(CloseableHttpResponse.class, RETURNS_DEEP_STUBS); - 
when(response.getStatusLine().getStatusCode()).thenReturn(200); - when(http.execute(any(HttpUriRequest.class))).thenReturn(response); + when(response.getCode()).thenReturn(200); + when(http.execute(any(ClassicHttpRequest.class), any(HttpClientResponseHandler.class))) + .thenReturn(response); - ArgumentCaptor captor = ArgumentCaptor.forClass(HttpUriRequest.class); + ArgumentCaptor captor = ArgumentCaptor.forClass(ClassicHttpRequest.class); client.emit(runEvent()); - verify(http, times(1)).execute(captor.capture()); + verify(http, times(1)).execute(captor.capture(), any(HttpClientResponseHandler.class)); assertThat(captor.getValue().getFirstHeader("Authorization").getValue()) .isEqualTo("Bearer apiKey"); - assertThat(captor.getValue().getURI()) - .isEqualTo(URI.create("http://localhost:1500/api/v1/lineage?param=value")); - } - - @Test - void clientClosesNetworkResources() throws IOException { - CloseableHttpClient http = mock(CloseableHttpClient.class); - HttpConfig config = new HttpConfig(); - config.setUrl(URI.create("https://localhost:1500/api/v1/lineage")); - Transport transport = new HttpTransport(http, config); - OpenLineageClient client = new OpenLineageClient(transport); - - CloseableHttpResponse response = mock(CloseableHttpResponse.class, RETURNS_DEEP_STUBS); - when(response.getStatusLine().getStatusCode()).thenReturn(200); - when(response.getEntity().isStreaming()).thenReturn(true); - - when(http.execute(any(HttpUriRequest.class))).thenReturn(response); - - client.emit(runEvent()); - - verify(response, times(1)).close(); - verify(response.getEntity().getContent(), times(1)).close(); + assertThat(captor.getValue().getUri()) + .isEqualTo(URI.create("https://localhost:1500/api/v1/lineage?param=value")); } @Test @@ -254,10 +247,10 @@ void customHeaders() throws IOException { OpenLineageClient client = new OpenLineageClient(transport); CloseableHttpResponse response = mock(CloseableHttpResponse.class, RETURNS_DEEP_STUBS); - when(response.getStatusLine().getStatusCode()).thenReturn(200); + when(response.getCode()).thenReturn(200); when(response.getEntity().isStreaming()).thenReturn(true); - Map map = new HashMap<>(); - when(http.execute(any(HttpUriRequest.class))) + Map map = new HashMap<>(); + when(http.execute(any(ClassicHttpRequest.class), any(HttpClientResponseHandler.class))) .thenAnswer( invocation -> { map.put("test", invocation.getArgument(0)); @@ -266,7 +259,7 @@ void customHeaders() throws IOException { client.emit(runEvent()); Map resultHeaders = - Arrays.stream(map.get("test").getAllHeaders()) + Arrays.stream(map.get("test").getHeaders()) .collect(Collectors.toMap(NameValuePair::getName, NameValuePair::getValue)); assertThat(resultHeaders) .containsEntry(ACCEPT, APPLICATION_JSON.toString()) @@ -286,10 +279,10 @@ void gzipCompression() throws IOException { OpenLineageClient client = new OpenLineageClient(transport); CloseableHttpResponse response = mock(CloseableHttpResponse.class, RETURNS_DEEP_STUBS); - when(response.getStatusLine().getStatusCode()).thenReturn(200); + when(response.getCode()).thenReturn(200); when(response.getEntity().isStreaming()).thenReturn(true); - Map map = new HashMap<>(); - when(http.execute(any(HttpUriRequest.class))) + Map map = new HashMap<>(); + when(http.execute(any(ClassicHttpRequest.class), any(HttpClientResponseHandler.class))) .thenAnswer( invocation -> { map.put("test", invocation.getArgument(0)); @@ -331,13 +324,14 @@ void clientEmitsDatasetEventHttpTransport() throws IOException { OpenLineageClient client = new 
OpenLineageClient(transport); CloseableHttpResponse response = mock(CloseableHttpResponse.class, RETURNS_DEEP_STUBS); - when(response.getStatusLine().getStatusCode()).thenReturn(200); + when(response.getCode()).thenReturn(200); - when(http.execute(any(HttpUriRequest.class))).thenReturn(response); + when(http.execute(any(ClassicHttpRequest.class), any(HttpClientResponseHandler.class))) + .thenReturn(response); client.emit(datasetEvent()); - verify(http, times(1)).execute(any()); + verify(http, times(1)).execute(any(), any(HttpClientResponseHandler.class)); } @Test @@ -349,48 +343,13 @@ void clientEmitsJobEventHttpTransport() throws IOException { OpenLineageClient client = new OpenLineageClient(transport); CloseableHttpResponse response = mock(CloseableHttpResponse.class, RETURNS_DEEP_STUBS); - when(response.getStatusLine().getStatusCode()).thenReturn(200); + when(response.getCode()).thenReturn(200); - when(http.execute(any(HttpUriRequest.class))).thenReturn(response); + when(http.execute(any(ClassicHttpRequest.class), any(HttpClientResponseHandler.class))) + .thenReturn(response); client.emit(jobEvent()); - verify(http, times(1)).execute(any()); - } - - @Test - @SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") - void testTimeout() { - HttpConfig config = new HttpConfig(); - config.setUrl(URI.create("https://localhost:1500/api/v1/lineage")); - config.setTimeout(2.5d); // 2.5 seconds - - Builder builder = mock(Builder.class); - try (MockedStatic mocked = mockStatic(RequestConfig.class)) { - when(RequestConfig.custom()).thenReturn(builder); - when(builder.setConnectTimeout(2500)).thenReturn(builder); - when(builder.setConnectionRequestTimeout(2500)).thenReturn(builder); - when(builder.setSocketTimeout(2500)).thenReturn(builder); - - new HttpTransport(config); - } - } - - @Test - @SuppressWarnings("PMD.JUnitTestsShouldIncludeAssert") - void testTimeoutInMillis() { - HttpConfig config = new HttpConfig(); - config.setUrl(URI.create("https://localhost:1500/api/v1/lineage")); - config.setTimeoutInMillis(3000); // 3 seconds - - Builder builder = mock(Builder.class); - try (MockedStatic mocked = mockStatic(RequestConfig.class)) { - when(RequestConfig.custom()).thenReturn(builder); - when(builder.setConnectTimeout(3000)).thenReturn(builder); - when(builder.setConnectionRequestTimeout(3000)).thenReturn(builder); - when(builder.setSocketTimeout(3000)).thenReturn(builder); - - new HttpTransport(config); - } + verify(http, times(1)).execute(any(), any(HttpClientResponseHandler.class)); } } diff --git a/client/java/transports-kinesis/README.md b/client/java/transports-kinesis/README.md new file mode 100644 index 0000000000..436fabf6cf --- /dev/null +++ b/client/java/transports-kinesis/README.md @@ -0,0 +1,94 @@ +# AWS Kinesis Transport + +This library provides a transport layer for sending emitted OpenLineage events to AWS Kinesis streams. + +## Getting Started + +### Adding the Dependency + +To use this transport in your project, you need to include the following dependency in your build configuration. This is particularly important for environments like `Spark`, where this transport must be on the classpath for lineage events to be emitted correctly. + +**Maven:** + +```xml + + io.openlineage + transports-kinesis + YOUR_VERSION_HERE + +``` + +#### Configuration + +- `type` - string, must be `"kinesis"`. Required. +- `streamName` - the streamName of the Kinesis. Required. +- `region` - the region of the Kinesis. Required. +- `roleArn` - the roleArn which is allowed to read/write to Kinesis stream. 
Optional. +- `properties` - a dictionary that contains a [Kinesis allowed properties](https://github.com/awslabs/amazon-kinesis-producer/blob/master/java/amazon-kinesis-producer-sample/default_config.properties). Optional. + +#### Behavior + +- Events are serialized to JSON, and then dispatched to the Kinesis stream. +- The partition key is generated as `{jobNamespace}:{jobName}`. +- Two constructors are available: one accepting both `KinesisProducer` and `KinesisConfig` and another solely accepting `KinesisConfig`. + +#### Examples + +```yaml +transport: + type: kinesis + streamName: your_kinesis_stream_name + region: your_aws_region + roleArn: arn:aws:iam::account-id:role/role-name + properties: + VerifyCertificate: true + ConnectTimeout: 6000 +``` + + +Spark: +```ini +spark.openlineage.transport.type=kinesis +spark.openlineage.transport.streamName=your_kinesis_stream_name +spark.openlineage.transport.region=your_aws_region +spark.openlineage.transport.roleArn=arn:aws:iam::account-id:role/role-name +spark.openlineage.transport.properties.VerifyCertificate=true +spark.openlineage.transport.properties.ConnectTimeout=6000 +``` + +Flink: +```ini +openlineage.transport.type=kinesis +openlineage.transport.streamName=your_kinesis_stream_name +openlineage.transport.region=your_aws_region +openlineage.transport.roleArn=arn:aws:iam::account-id:role/role-name +openlineage.transport.properties.VerifyCertificate=true +openlineage.transport.properties.ConnectTimeout=6000 +``` + +Code: +```java +import java.util.Properties; + +import io.openlineage.client.OpenLineageClient; +import io.openlineage.client.transports.KinesisConfig; +import io.openlineage.client.transports.KinesisTransport; + +Properties kinesisProperties = new Properties(); +kinesisProperties.setProperty("property_name_1", "value_1"); +kinesisProperties.setProperty("property_name_2", "value_2"); + +KinesisConfig kinesisConfig = new KinesisConfig(); +kinesisConfig.setStreamName("your_kinesis_stream_name"); +kinesisConfig.setRegion("your_aws_region"); +kinesisConfig.setRoleArn("arn:aws:iam::account-id:role/role-name"); +kinesisConfig.setProperties(kinesisProperties); + +OpenLineageClient client = OpenLineageClient.builder() + .transport( + new KinesisTransport(httpConfig)) + .build(); +``` + + + diff --git a/client/java/transports-kinesis/build.gradle b/client/java/transports-kinesis/build.gradle new file mode 100644 index 0000000000..1a10b99b78 --- /dev/null +++ b/client/java/transports-kinesis/build.gradle @@ -0,0 +1,29 @@ +/* +/* Copyright 2018-2024 contributors to the OpenLineage project +/* SPDX-License-Identifier: Apache-2.0 +*/ + +plugins { + id 'eclipse' + id 'jacoco' + id 'java' + id 'java-library' + id 'maven-publish' + id 'signing' + id "com.adarshr.test-logger" version "3.2.0" + // Don't bump above 6.13 - it requires Java 11 https://github.com/diffplug/spotless/blob/main/plugin-gradle/CHANGES.md#changes-12 + id 'com.diffplug.spotless' version '6.13.0' + id "com.gradleup.shadow" version "8.3.5" + id "pmd" + id "io.freefair.lombok" version "8.10.2" +} + +ext { + projectDescription = "GcpLineage OpenLineage transport library" +} + +dependencies { + implementation("com.amazonaws:amazon-kinesis-producer:0.15.12") +} + +apply from: '../transports.build.gradle' diff --git a/client/java/src/main/java/io/openlineage/client/transports/KinesisConfig.java b/client/java/transports-kinesis/src/main/java/io/openlineage/client/transports/kinesis/KinesisConfig.java similarity index 92% rename from 
client/java/src/main/java/io/openlineage/client/transports/KinesisConfig.java rename to client/java/transports-kinesis/src/main/java/io/openlineage/client/transports/kinesis/KinesisConfig.java index dd1174fcab..dbf9efa4c2 100644 --- a/client/java/src/main/java/io/openlineage/client/transports/KinesisConfig.java +++ b/client/java/transports-kinesis/src/main/java/io/openlineage/client/transports/kinesis/KinesisConfig.java @@ -3,10 +3,11 @@ /* SPDX-License-Identifier: Apache-2.0 */ -package io.openlineage.client.transports; +package io.openlineage.client.transports.kinesis; import com.fasterxml.jackson.annotation.JsonProperty; import io.openlineage.client.MergeConfig; +import io.openlineage.client.transports.TransportConfig; import java.util.Properties; import lombok.AllArgsConstructor; import lombok.Getter; diff --git a/client/java/src/main/java/io/openlineage/client/transports/KinesisTransport.java b/client/java/transports-kinesis/src/main/java/io/openlineage/client/transports/kinesis/KinesisTransport.java similarity index 97% rename from client/java/src/main/java/io/openlineage/client/transports/KinesisTransport.java rename to client/java/transports-kinesis/src/main/java/io/openlineage/client/transports/kinesis/KinesisTransport.java index 5e7d69cf08..651ab10041 100644 --- a/client/java/src/main/java/io/openlineage/client/transports/KinesisTransport.java +++ b/client/java/transports-kinesis/src/main/java/io/openlineage/client/transports/kinesis/KinesisTransport.java @@ -3,7 +3,7 @@ /* SPDX-License-Identifier: Apache-2.0 */ -package io.openlineage.client.transports; +package io.openlineage.client.transports.kinesis; import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider; import com.amazonaws.services.kinesis.producer.KinesisProducer; @@ -15,6 +15,7 @@ import com.google.common.util.concurrent.ListenableFuture; import io.openlineage.client.OpenLineage; import io.openlineage.client.OpenLineageClientUtils; +import io.openlineage.client.transports.Transport; import java.nio.ByteBuffer; import java.util.concurrent.Executor; import java.util.concurrent.Executors; @@ -34,8 +35,6 @@ public class KinesisTransport extends Transport { public KinesisTransport( @NonNull final KinesisProducer kinesisProducer, @NonNull final KinesisConfig kinesisConfig) { - - super(Type.KINESIS); this.streamName = kinesisConfig.getStreamName(); this.region = kinesisConfig.getRegion(); this.roleArn = kinesisConfig.getRoleArn(); @@ -44,7 +43,6 @@ public KinesisTransport( } public KinesisTransport(@NonNull final KinesisConfig kinesisConfig) { - super(Type.KINESIS); this.streamName = kinesisConfig.getStreamName(); this.region = kinesisConfig.getRegion(); this.roleArn = kinesisConfig.getRoleArn(); diff --git a/client/java/src/main/java/io/openlineage/client/transports/KinesisTransportBuilder.java b/client/java/transports-kinesis/src/main/java/io/openlineage/client/transports/kinesis/KinesisTransportBuilder.java similarity index 67% rename from client/java/src/main/java/io/openlineage/client/transports/KinesisTransportBuilder.java rename to client/java/transports-kinesis/src/main/java/io/openlineage/client/transports/kinesis/KinesisTransportBuilder.java index edb7f937ce..ec4b1180da 100644 --- a/client/java/src/main/java/io/openlineage/client/transports/KinesisTransportBuilder.java +++ b/client/java/transports-kinesis/src/main/java/io/openlineage/client/transports/kinesis/KinesisTransportBuilder.java @@ -3,7 +3,11 @@ /* SPDX-License-Identifier: Apache-2.0 */ -package io.openlineage.client.transports; +package 
io.openlineage.client.transports.kinesis; + +import io.openlineage.client.transports.Transport; +import io.openlineage.client.transports.TransportBuilder; +import io.openlineage.client.transports.TransportConfig; public class KinesisTransportBuilder implements TransportBuilder { diff --git a/client/java/transports-kinesis/src/main/resources/META-INF.services/io.openlineage.client.transports.TransportBuilder b/client/java/transports-kinesis/src/main/resources/META-INF.services/io.openlineage.client.transports.TransportBuilder new file mode 100644 index 0000000000..50bb081449 --- /dev/null +++ b/client/java/transports-kinesis/src/main/resources/META-INF.services/io.openlineage.client.transports.TransportBuilder @@ -0,0 +1 @@ +io.openlineage.client.transports.KinesisTransportBuilder diff --git a/client/java/src/test/java/io/openlineage/client/transports/KinesisTransportTest.java b/client/java/transports-kinesis/src/test/java/io/openlineage/client/transports/kinesis/KinesisTransportTest.java similarity index 74% rename from client/java/src/test/java/io/openlineage/client/transports/KinesisTransportTest.java rename to client/java/transports-kinesis/src/test/java/io/openlineage/client/transports/kinesis/KinesisTransportTest.java index 96ba7c06ad..8e6f38c76a 100644 --- a/client/java/src/test/java/io/openlineage/client/transports/KinesisTransportTest.java +++ b/client/java/transports-kinesis/src/test/java/io/openlineage/client/transports/kinesis/KinesisTransportTest.java @@ -3,11 +3,8 @@ /* SPDX-License-Identifier: Apache-2.0 */ -package io.openlineage.client.transports; +package io.openlineage.client.transports.kinesis; -import static io.openlineage.client.Events.datasetEvent; -import static io.openlineage.client.Events.jobEvent; -import static io.openlineage.client.Events.runEvent; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.*; @@ -19,14 +16,17 @@ import io.openlineage.client.OpenLineageClient; import io.openlineage.client.OpenLineageClientUtils; import java.io.IOException; +import java.net.URI; import java.util.Properties; +import java.util.UUID; import org.junit.jupiter.api.Test; import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; class KinesisTransportTest { @Test void clientEmitsRunEventKinesisTransport() throws IOException { - KinesisProducer producer = mock(KinesisProducer.class); + KinesisProducer producer = Mockito.mock(KinesisProducer.class); KinesisConfig config = new KinesisConfig(); Properties properties = new Properties(); @@ -42,7 +42,18 @@ void clientEmitsRunEventKinesisTransport() throws IOException { when(producer.addUserRecord(any(UserRecord.class))).thenReturn(mock(ListenableFuture.class)); - OpenLineage.RunEvent event = runEvent(); + OpenLineage.Job job = + new OpenLineage.JobBuilder().namespace("test-namespace").name("test-job").build(); + OpenLineage.Run run = + new OpenLineage.RunBuilder() + .runId(UUID.fromString("ea445b5c-22eb-457a-8007-01c7c52b6e54")) + .build(); + OpenLineage.RunEvent event = + new OpenLineage(URI.create("http://test.producer")) + .newRunEventBuilder() + .job(job) + .run(run) + .build(); client.emit(event); ArgumentCaptor captor = ArgumentCaptor.forClass(UserRecord.class); @@ -59,7 +70,7 @@ void clientEmitsRunEventKinesisTransport() throws IOException { @Test void clientEmitsDatasetEventKinesisTransport() throws IOException { - KinesisProducer producer = mock(KinesisProducer.class); + KinesisProducer producer = 
Mockito.mock(KinesisProducer.class); KinesisConfig config = new KinesisConfig(); Properties properties = new Properties(); @@ -75,7 +86,16 @@ void clientEmitsDatasetEventKinesisTransport() throws IOException { when(producer.addUserRecord(any(UserRecord.class))).thenReturn(mock(ListenableFuture.class)); - OpenLineage.DatasetEvent event = datasetEvent(); + OpenLineage.StaticDataset dataset = + new OpenLineage.StaticDatasetBuilder() + .namespace("test-namespace") + .name("test-dataset") + .build(); + OpenLineage.DatasetEvent event = + new OpenLineage(URI.create("http://test.producer")) + .newDatasetEventBuilder() + .dataset(dataset) + .build(); client.emit(event); ArgumentCaptor captor = ArgumentCaptor.forClass(UserRecord.class); @@ -93,7 +113,7 @@ void clientEmitsDatasetEventKinesisTransport() throws IOException { @Test void clientEmitsJobEventKinesisTransport() throws IOException { - KinesisProducer producer = mock(KinesisProducer.class); + KinesisProducer producer = Mockito.mock(KinesisProducer.class); KinesisConfig config = new KinesisConfig(); Properties properties = new Properties(); @@ -109,7 +129,10 @@ void clientEmitsJobEventKinesisTransport() throws IOException { when(producer.addUserRecord(any(UserRecord.class))).thenReturn(mock(ListenableFuture.class)); - OpenLineage.JobEvent event = jobEvent(); + OpenLineage.Job job = + new OpenLineage.JobBuilder().namespace("test-namespace").name("test-job").build(); + OpenLineage.JobEvent event = + new OpenLineage(URI.create("http://test.producer")).newJobEventBuilder().job(job).build(); client.emit(event); ArgumentCaptor captor = ArgumentCaptor.forClass(UserRecord.class); diff --git a/client/java/transports-s3/build.gradle b/client/java/transports-s3/build.gradle index 6fd04c07c4..904d7faa7e 100644 --- a/client/java/transports-s3/build.gradle +++ b/client/java/transports-s3/build.gradle @@ -21,7 +21,7 @@ plugins { ext { projectDescription = "S3 OpenLineage transport library" - s3MockVersion = "3.11.0" + s3MockVersion = "2.17.0" testcontainersVersion = "1.20.3" } diff --git a/integration/flink/app/build.gradle b/integration/flink/app/build.gradle index d1f97f0906..43a9da67ce 100644 --- a/integration/flink/app/build.gradle +++ b/integration/flink/app/build.gradle @@ -12,14 +12,8 @@ plugins { id 'com.diffplug.spotless' id 'io.franzbecker.gradle-lombok' id 'io.openlineage.common-config' -} - -final def fixturesDir = layout.buildDirectory.dir("fixtures") - -configurations { - fixturesJar { - canBeResolved = true - } + id 'maven-publish' + id 'com.github.johnrengelman.shadow' } archivesBaseName='openlineage-flink-app' @@ -38,7 +32,6 @@ ext { } dependencies { - fixturesJar(project(path: ':examples:stateful', configuration: 'shadow')) implementation(project(path: ":shared")) implementation(project(path: ":flink115")) implementation(project(path: ":flink117")) @@ -53,8 +46,7 @@ dependencies { annotationProcessor "org.projectlombok:lombok:${lombokVersion}" implementation "com.fasterxml.jackson.core:jackson-databind:${jacksonVersion}" - implementation 'org.apache.httpcomponents.client5:httpclient5:5.3.1' - + implementation 'org.apache.httpcomponents.client5:httpclient5:5.4.1' testImplementation "org.apache.flink:flink-java:$flinkVersion" testImplementation "org.apache.flink:flink-streaming-java:$flinkVersion" testImplementation platform('org.junit:junit-bom:5.11.0') @@ -78,13 +70,6 @@ dependencies { testAnnotationProcessor "org.projectlombok:lombok:${lombokVersion}" } -tasks.register("copyIntegrationTestFixtures", Copy) { - group = "copy" - description = 
"Copies integration test fixtures to the build directory" - from(configurations.named("fixturesJar")) - into(fixturesDir) -} - task sourceJar(type: Jar) { archiveClassifier = 'sources' from sourceSets.main.allJava @@ -118,18 +103,16 @@ def integrationTestConfiguration = { 'junit.platform.output.capture.stdout': 'true', 'junit.platform.output.capture.stderr': 'true', '.flink.jar': "${archivesBaseName}-${project.version}.jar", - 'fixtures.dir': fixturesDir.get().asFile.absolutePath ] classpath = project.sourceSets.test.runtimeClasspath } + task integrationTest(type: Test) { configure integrationTestConfiguration useJUnitPlatform { includeTags "integration-test" } - dependsOn copyIntegrationTestFixtures - } integrationTest.outputs.upToDateWhen { false } diff --git a/integration/flink/app/src/test/java/io/openlineage/flink/FlinkContainerUtils.java b/integration/flink/app/src/test/java/io/openlineage/flink/FlinkContainerUtils.java index 75a2416444..bdb676d274 100644 --- a/integration/flink/app/src/test/java/io/openlineage/flink/FlinkContainerUtils.java +++ b/integration/flink/app/src/test/java/io/openlineage/flink/FlinkContainerUtils.java @@ -290,8 +290,8 @@ static String getOpenLineageJarPath() { } static String getExampleAppJarPath() { - return Arrays.stream(new File(System.getProperty("fixtures.dir")).listFiles()) - .filter(file -> file.getName().startsWith("stateful")) + return Arrays.stream((new File("../fixtures")).listFiles()) + .filter(file -> file.getName().startsWith("flink-examples-stateful")) .map(file -> file.getPath()) .findAny() .get(); diff --git a/integration/flink/build.gradle b/integration/flink/build.gradle index 3945d4b7cd..14fccedb59 100644 --- a/integration/flink/build.gradle +++ b/integration/flink/build.gradle @@ -58,6 +58,41 @@ test { } } +task shadowStateful(type: Exec) { + workingDir 'examples/stateful' + commandLine 'bash', '-c', "pwd && ./gradlew clean shadowJar -Pflink.version=${flinkVersion} && mkdir -p ../../fixtures && ls -halt build/libs && cp build/libs/flink-examples-stateful-*.jar ../../fixtures && ls -la ../../fixtures" + doLast { + println "Copied stateful-*.jar to fixtures directory" + } +} + +def integrationTestConfiguration = { + forkEvery 1 + maxParallelForks 1 + testLogging { + events "passed", "skipped", "failed" + showStandardStreams = true + } + systemProperties = [ + 'flink.version': "${flinkVersion}", + 'junit.platform.output.capture.stdout': 'true', + 'junit.platform.output.capture.stderr': 'true', + '.flink.jar': "${archivesBaseName}-${project.version}.jar", + ] + classpath = project.sourceSets.test.runtimeClasspath +} + + +task integrationTest(type: Test) { + configure integrationTestConfiguration + useJUnitPlatform { + includeTags "integration-test" + } +} + +integrationTest.dependsOn(shadowJar, project.tasks.publishToMavenLocal, shadowStateful) + + def reportsDir = "${buildDir}/reports" def coverageDir = "${reportsDir}/coverage" @@ -179,14 +214,13 @@ shadowJar { relocate 'com.fasterxml.jackson', 'io.openlineage.flink.shaded.com.fasterxml.jackson' relocate 'com.github.ok2c.hc5', 'io.openlineage.flink.shaded.com.github.ok2c.hc5' relocate 'org.apache.httpcomponents.client5', 'io.openlineage.flink.shaded.org.apache.httpcomponents.client5' - relocate 'org.apache.http', 'io.openlineage.flink.shaded.org.apache.http' relocate 'javassist', 'io.openlineage.flink.shaded.javassist' relocate 'org.yaml', 'io.openlineage.flink.shaded.org.yaml' relocate 'org.apache.hc', 'io.openlineage.flink.shaded.org.apache.hc' relocate 'org.apache.commons.codec', 
'io.openlineage.flink.shaded.org.apache.commons.codec' relocate 'org.apache.commons.lang3', 'io.openlineage.flink.shaded.org.apache.commons.lang3' - relocate "org.hdrhistogram", "io.openlineage.spark.shaded.org.hdrhistogram" - relocate "org.latencyutils", "io.openlineage.spark.shaded.org.latencyutils" + relocate "org.HdrHistogram", "io.openlineage.flink.shaded.org.hdrhistogram" + relocate "org.LatencyUtils", "io.openlineage.flink.shaded.org.latencyutils" dependencies { exclude(dependency('org.slf4j::')) } @@ -213,7 +247,7 @@ assemble { } spotlessJava { - dependsOn("compileJava", "compileTestJava", ":examples:stateful:generateAvroJava") + dependsOn("compileJava", "compileTestJava") } task createVersionProperties(dependsOn: processResources) { diff --git a/integration/flink/examples/stateful/build.gradle b/integration/flink/examples/stateful/build.gradle index c9322878cb..aaf6e5f865 100644 --- a/integration/flink/examples/stateful/build.gradle +++ b/integration/flink/examples/stateful/build.gradle @@ -2,14 +2,20 @@ plugins { id 'java-library' id 'jacoco' id 'pmd' - id "com.adarshr.test-logger" - id 'com.diffplug.spotless' - id "com.github.johnrengelman.shadow" - id 'io.openlineage.common-config' + id "com.adarshr.test-logger" version "4.0.0" + id 'com.diffplug.spotless' version '6.23.0' + id "com.github.johnrengelman.shadow" version "8.1.1" id "com.github.davidmc24.gradle.plugin.avro" version "1.9.1" id "com.google.protobuf" version "0.9.4" } +repositories { + gradlePluginPortal() + mavenCentral() + maven { url "https://packages.confluent.io/maven/" } + mavenLocal() +} + group 'io.openlineage.flink' shadowJar { @@ -36,8 +42,6 @@ ext { } dependencies { - implementation(project(path: ":app")) - implementation(project(path: ":shared")) configurations.all { exclude module: 'slf4j-log4j12' exclude module: 'log4j' @@ -49,7 +53,7 @@ dependencies { implementation 'org.awaitility:awaitility:4.2.0' implementation 'org.apache.httpcomponents.client5:httpclient5:5.3.1' - implementation("io.openlineage:openlineage-java:$project.version") + implementation "io.openlineage:openlineage-flink:$project.version" compileOnly "org.apache.flink:flink-java:$flinkVersion" compileOnly "org.apache.flink:flink-streaming-scala_2.12:$flinkVersion" compileOnly "org.apache.flink:flink-streaming-java:$flinkVersion" @@ -111,6 +115,13 @@ avro { fieldVisibility = 'PUBLIC' } +compileJava { + options.incremental = true + options.compilerArgs << '-parameters' + options.encoding = "UTF-8" + sourceCompatibility = JavaVersion.VERSION_11 + targetCompatibility = JavaVersion.VERSION_11 +} extractIncludeTestProto.enabled = false diff --git a/integration/flink/examples/stateful/gradle.properties b/integration/flink/examples/stateful/gradle.properties index a93f661875..e28bc49492 100644 --- a/integration/flink/examples/stateful/gradle.properties +++ b/integration/flink/examples/stateful/gradle.properties @@ -1 +1,3 @@ version=1.24.0-SNAPSHOT +flink.version=1.19.0 +org.gradle.jvmargs=-Xmx1G \ No newline at end of file diff --git a/integration/flink/examples/stateful/gradle/wrapper/gradle-wrapper.jar b/integration/flink/examples/stateful/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000000000000000000000000000000000000..7454180f2ae8848c63b8b4dea2cb829da983f2fa GIT binary patch literal 59536 zcma&NbC71ylI~qywr$(CZQJHswz}-9F59+k+g;UV+cs{`J?GrGXYR~=-ydruB3JCa zB64N^cILAcWk5iofq)<(fq;O7{th4@;QxID0)qN`mJ?GIqLY#rX8-|G{5M0pdVW5^ zzXk$-2kQTAC?_N@B`&6-N-rmVFE=$QD?>*=4<|!MJu@}isLc4AW#{m2if&A5T5g&~ 
ziuMQeS*U5sL6J698wOd)K@oK@1{peP5&Esut<#VH^u)gp`9H4)`uE!2$>RTctN+^u z=ASkePDZA-X8)rp%D;p*~P?*a_=*Kwc<^>QSH|^<0>o37lt^+Mj1;4YvJ(JR-Y+?%Nu}JAYj5 z_Qc5%Ao#F?q32i?ZaN2OSNhWL;2oDEw_({7ZbgUjna!Fqn3NzLM@-EWFPZVmc>(fZ z0&bF-Ch#p9C{YJT9Rcr3+Y_uR^At1^BxZ#eo>$PLJF3=;t_$2|t+_6gg5(j{TmjYU zK12c&lE?Eh+2u2&6Gf*IdKS&6?rYbSEKBN!rv{YCm|Rt=UlPcW9j`0o6{66#y5t9C zruFA2iKd=H%jHf%ypOkxLnO8#H}#Zt{8p!oi6)7#NqoF({t6|J^?1e*oxqng9Q2Cc zg%5Vu!em)}Yuj?kaP!D?b?(C*w!1;>R=j90+RTkyEXz+9CufZ$C^umX^+4|JYaO<5 zmIM3#dv`DGM;@F6;(t!WngZSYzHx?9&$xEF70D1BvfVj<%+b#)vz)2iLCrTeYzUcL z(OBnNoG6Le%M+@2oo)&jdOg=iCszzv59e zDRCeaX8l1hC=8LbBt|k5?CXgep=3r9BXx1uR8!p%Z|0+4Xro=xi0G!e{c4U~1j6!) zH6adq0}#l{%*1U(Cb%4AJ}VLWKBPi0MoKFaQH6x?^hQ!6em@993xdtS%_dmevzeNl z(o?YlOI=jl(`L9^ z0O+H9k$_@`6L13eTT8ci-V0ljDMD|0ifUw|Q-Hep$xYj0hTO@0%IS^TD4b4n6EKDG z??uM;MEx`s98KYN(K0>c!C3HZdZ{+_53DO%9k5W%pr6yJusQAv_;IA}925Y%;+!tY z%2k!YQmLLOr{rF~!s<3-WEUs)`ix_mSU|cNRBIWxOox_Yb7Z=~Q45ZNe*u|m^|)d* zog=i>`=bTe!|;8F+#H>EjIMcgWcG2ORD`w0WD;YZAy5#s{65~qfI6o$+Ty&-hyMyJ z3Ra~t>R!p=5ZpxA;QkDAoPi4sYOP6>LT+}{xp}tk+<0k^CKCFdNYG(Es>p0gqD)jP zWOeX5G;9(m@?GOG7g;e74i_|SmE?`B2i;sLYwRWKLy0RLW!Hx`=!LH3&k=FuCsM=9M4|GqzA)anEHfxkB z?2iK-u(DC_T1};KaUT@3nP~LEcENT^UgPvp!QC@Dw&PVAhaEYrPey{nkcn(ro|r7XUz z%#(=$7D8uP_uU-oPHhd>>^adbCSQetgSG`e$U|7mr!`|bU0aHl_cmL)na-5x1#OsVE#m*+k84Y^+UMeSAa zbrVZHU=mFwXEaGHtXQq`2ZtjfS!B2H{5A<3(nb-6ARVV8kEmOkx6D2x7~-6hl;*-*}2Xz;J#a8Wn;_B5=m zl3dY;%krf?i-Ok^Pal-}4F`{F@TYPTwTEhxpZK5WCpfD^UmM_iYPe}wpE!Djai6_{ z*pGO=WB47#Xjb7!n2Ma)s^yeR*1rTxp`Mt4sfA+`HwZf%!7ZqGosPkw69`Ix5Ku6G z@Pa;pjzV&dn{M=QDx89t?p?d9gna*}jBly*#1!6}5K<*xDPJ{wv4& zM$17DFd~L*Te3A%yD;Dp9UGWTjRxAvMu!j^Tbc}2v~q^59d4bz zvu#!IJCy(BcWTc`;v$9tH;J%oiSJ_i7s;2`JXZF+qd4C)vY!hyCtl)sJIC{ebI*0> z@x>;EzyBv>AI-~{D6l6{ST=em*U( z(r$nuXY-#CCi^8Z2#v#UXOt`dbYN1z5jzNF2 z411?w)whZrfA20;nl&C1Gi+gk<`JSm+{|*2o<< zqM#@z_D`Cn|0H^9$|Tah)0M_X4c37|KQ*PmoT@%xHc3L1ZY6(p(sNXHa&49Frzto& zR`c~ClHpE~4Z=uKa5S(-?M8EJ$zt0&fJk~p$M#fGN1-y$7!37hld`Uw>Urri(DxLa;=#rK0g4J)pXMC zxzraOVw1+kNWpi#P=6(qxf`zSdUC?D$i`8ZI@F>k6k zz21?d+dw7b&i*>Kv5L(LH-?J%@WnqT7j#qZ9B>|Zl+=> z^U-pV@1y_ptHo4hl^cPRWewbLQ#g6XYQ@EkiP z;(=SU!yhjHp%1&MsU`FV1Z_#K1&(|5n(7IHbx&gG28HNT)*~-BQi372@|->2Aw5It z0CBpUcMA*QvsPy)#lr!lIdCi@1k4V2m!NH)%Px(vu-r(Q)HYc!p zJ^$|)j^E#q#QOgcb^pd74^JUi7fUmMiNP_o*lvx*q%_odv49Dsv$NV;6J z9GOXKomA{2Pb{w}&+yHtH?IkJJu~}Z?{Uk++2mB8zyvh*xhHKE``99>y#TdD z&(MH^^JHf;g(Tbb^&8P*;_i*2&fS$7${3WJtV7K&&(MBV2~)2KB3%cWg#1!VE~k#C z!;A;?p$s{ihyojEZz+$I1)L}&G~ml=udD9qh>Tu(ylv)?YcJT3ihapi!zgPtWb*CP zlLLJSRCj-^w?@;RU9aL2zDZY1`I3d<&OMuW=c3$o0#STpv_p3b9Wtbql>w^bBi~u4 z3D8KyF?YE?=HcKk!xcp@Cigvzy=lnFgc^9c%(^F22BWYNAYRSho@~*~S)4%AhEttv zvq>7X!!EWKG?mOd9&n>vvH1p4VzE?HCuxT-u+F&mnsfDI^}*-d00-KAauEaXqg3k@ zy#)MGX!X;&3&0s}F3q40ZmVM$(H3CLfpdL?hB6nVqMxX)q=1b}o_PG%r~hZ4gUfSp zOH4qlEOW4OMUc)_m)fMR_rl^pCfXc{$fQbI*E&mV77}kRF z&{<06AJyJ!e863o-V>FA1a9Eemx6>^F$~9ppt()ZbPGfg_NdRXBWoZnDy2;#ODgf! 
zgl?iOcF7Meo|{AF>KDwTgYrJLb$L2%%BEtO>T$C?|9bAB&}s;gI?lY#^tttY&hfr# zKhC+&b-rpg_?~uVK%S@mQleU#_xCsvIPK*<`E0fHE1&!J7!xD#IB|SSPW6-PyuqGn3^M^Rz%WT{e?OI^svARX&SAdU77V(C~ zM$H{Kg59op{<|8ry9ecfP%=kFm(-!W&?U0@<%z*+!*<e0XesMxRFu9QnGqun6R_%T+B%&9Dtk?*d$Q zb~>84jEAPi@&F@3wAa^Lzc(AJz5gsfZ7J53;@D<;Klpl?sK&u@gie`~vTsbOE~Cd4 z%kr56mI|#b(Jk&;p6plVwmNB0H@0SmgdmjIn5Ne@)}7Vty(yb2t3ev@22AE^s!KaN zyQ>j+F3w=wnx7w@FVCRe+`vUH)3gW%_72fxzqX!S&!dchdkRiHbXW1FMrIIBwjsai8`CB2r4mAbwp%rrO>3B$Zw;9=%fXI9B{d(UzVap7u z6piC-FQ)>}VOEuPpuqznpY`hN4dGa_1Xz9rVg(;H$5Te^F0dDv*gz9JS<|>>U0J^# z6)(4ICh+N_Q`Ft0hF|3fSHs*?a=XC;e`sJaU9&d>X4l?1W=|fr!5ShD|nv$GK;j46@BV6+{oRbWfqOBRb!ir88XD*SbC(LF}I1h#6@dvK%Toe%@ zhDyG$93H8Eu&gCYddP58iF3oQH*zLbNI;rN@E{T9%A8!=v#JLxKyUe}e}BJpB{~uN zqgxRgo0*-@-iaHPV8bTOH(rS(huwK1Xg0u+e!`(Irzu@Bld&s5&bWgVc@m7;JgELd zimVs`>vQ}B_1(2#rv#N9O`fJpVfPc7V2nv34PC);Dzbb;p!6pqHzvy?2pD&1NE)?A zt(t-ucqy@wn9`^MN5apa7K|L=9>ISC>xoc#>{@e}m#YAAa1*8-RUMKwbm|;5p>T`Z zNf*ph@tnF{gmDa3uwwN(g=`Rh)4!&)^oOy@VJaK4lMT&5#YbXkl`q?<*XtsqD z9PRK6bqb)fJw0g-^a@nu`^?71k|m3RPRjt;pIkCo1{*pdqbVs-Yl>4E>3fZx3Sv44grW=*qdSoiZ9?X0wWyO4`yDHh2E!9I!ZFi zVL8|VtW38}BOJHW(Ax#KL_KQzarbuE{(%TA)AY)@tY4%A%P%SqIU~8~-Lp3qY;U-} z`h_Gel7;K1h}7$_5ZZT0&%$Lxxr-<89V&&TCsu}LL#!xpQ1O31jaa{U34~^le*Y%L za?7$>Jk^k^pS^_M&cDs}NgXlR>16AHkSK-4TRaJSh#h&p!-!vQY%f+bmn6x`4fwTp z$727L^y`~!exvmE^W&#@uY!NxJi`g!i#(++!)?iJ(1)2Wk;RN zFK&O4eTkP$Xn~4bB|q8y(btx$R#D`O@epi4ofcETrx!IM(kWNEe42Qh(8*KqfP(c0 zouBl6>Fc_zM+V;F3znbo{x#%!?mH3`_ANJ?y7ppxS@glg#S9^MXu|FM&ynpz3o&Qh z2ujAHLF3($pH}0jXQsa#?t--TnF1P73b?4`KeJ9^qK-USHE)4!IYgMn-7z|=ALF5SNGkrtPG@Y~niUQV2?g$vzJN3nZ{7;HZHzWAeQ;5P|@Tl3YHpyznGG4-f4=XflwSJY+58-+wf?~Fg@1p1wkzuu-RF3j2JX37SQUc? zQ4v%`V8z9ZVZVqS8h|@@RpD?n0W<=hk=3Cf8R?d^9YK&e9ZybFY%jdnA)PeHvtBe- zhMLD+SSteHBq*q)d6x{)s1UrsO!byyLS$58WK;sqip$Mk{l)Y(_6hEIBsIjCr5t>( z7CdKUrJTrW%qZ#1z^n*Lb8#VdfzPw~OIL76aC+Rhr<~;4Tl!sw?Rj6hXj4XWa#6Tp z@)kJ~qOV)^Rh*-?aG>ic2*NlC2M7&LUzc9RT6WM%Cpe78`iAowe!>(T0jo&ivn8-7 zs{Qa@cGy$rE-3AY0V(l8wjI^uB8Lchj@?L}fYal^>T9z;8juH@?rG&g-t+R2dVDBe zq!K%{e-rT5jX19`(bP23LUN4+_zh2KD~EAYzhpEO3MUG8@}uBHH@4J zd`>_(K4q&>*k82(dDuC)X6JuPrBBubOg7qZ{?x!r@{%0);*`h*^F|%o?&1wX?Wr4b z1~&cy#PUuES{C#xJ84!z<1tp9sfrR(i%Tu^jnXy;4`Xk;AQCdFC@?V%|; zySdC7qS|uQRcH}EFZH%mMB~7gi}a0utE}ZE_}8PQH8f;H%PN41Cb9R%w5Oi5el^fd z$n{3SqLCnrF##x?4sa^r!O$7NX!}&}V;0ZGQ&K&i%6$3C_dR%I7%gdQ;KT6YZiQrW zk%q<74oVBV>@}CvJ4Wj!d^?#Zwq(b$E1ze4$99DuNg?6t9H}k_|D7KWD7i0-g*EO7 z;5{hSIYE4DMOK3H%|f5Edx+S0VI0Yw!tsaRS2&Il2)ea^8R5TG72BrJue|f_{2UHa z@w;^c|K3da#$TB0P3;MPlF7RuQeXT$ zS<<|C0OF(k)>fr&wOB=gP8!Qm>F41u;3esv7_0l%QHt(~+n; zf!G6%hp;Gfa9L9=AceiZs~tK+Tf*Wof=4!u{nIO90jH@iS0l+#%8=~%ASzFv7zqSB^?!@N7)kp0t&tCGLmzXSRMRyxCmCYUD2!B`? 
zhs$4%KO~m=VFk3Buv9osha{v+mAEq=ik3RdK@;WWTV_g&-$U4IM{1IhGX{pAu%Z&H zFfwCpUsX%RKg);B@7OUzZ{Hn{q6Vv!3#8fAg!P$IEx<0vAx;GU%}0{VIsmFBPq_mb zpe^BChDK>sc-WLKl<6 zwbW|e&d&dv9Wu0goueyu>(JyPx1mz0v4E?cJjFuKF71Q1)AL8jHO$!fYT3(;U3Re* zPPOe%*O+@JYt1bW`!W_1!mN&=w3G9ru1XsmwfS~BJ))PhD(+_J_^N6j)sx5VwbWK| zwRyC?W<`pOCY)b#AS?rluxuuGf-AJ=D!M36l{ua?@SJ5>e!IBr3CXIxWw5xUZ@Xrw z_R@%?{>d%Ld4p}nEsiA@v*nc6Ah!MUs?GA7e5Q5lPpp0@`%5xY$C;{%rz24$;vR#* zBP=a{)K#CwIY%p} zXVdxTQ^HS@O&~eIftU+Qt^~(DGxrdi3k}DdT^I7Iy5SMOp$QuD8s;+93YQ!OY{eB24%xY7ml@|M7I(Nb@K_-?F;2?et|CKkuZK_>+>Lvg!>JE~wN`BI|_h6$qi!P)+K-1Hh(1;a`os z55)4Q{oJiA(lQM#;w#Ta%T0jDNXIPM_bgESMCDEg6rM33anEr}=|Fn6)|jBP6Y}u{ zv9@%7*#RI9;fv;Yii5CI+KrRdr0DKh=L>)eO4q$1zmcSmglsV`*N(x=&Wx`*v!!hn6X-l0 zP_m;X??O(skcj+oS$cIdKhfT%ABAzz3w^la-Ucw?yBPEC+=Pe_vU8nd-HV5YX6X8r zZih&j^eLU=%*;VzhUyoLF;#8QsEfmByk+Y~caBqSvQaaWf2a{JKB9B>V&r?l^rXaC z8)6AdR@Qy_BxQrE2Fk?ewD!SwLuMj@&d_n5RZFf7=>O>hzVE*seW3U?_p|R^CfoY`?|#x9)-*yjv#lo&zP=uI`M?J zbzC<^3x7GfXA4{FZ72{PE*-mNHyy59Q;kYG@BB~NhTd6pm2Oj=_ zizmD?MKVRkT^KmXuhsk?eRQllPo2Ubk=uCKiZ&u3Xjj~<(!M94c)Tez@9M1Gfs5JV z->@II)CDJOXTtPrQudNjE}Eltbjq>6KiwAwqvAKd^|g!exgLG3;wP+#mZYr`cy3#39e653d=jrR-ulW|h#ddHu(m9mFoW~2yE zz5?dB%6vF}+`-&-W8vy^OCxm3_{02royjvmwjlp+eQDzFVEUiyO#gLv%QdDSI#3W* z?3!lL8clTaNo-DVJw@ynq?q!%6hTQi35&^>P85G$TqNt78%9_sSJt2RThO|JzM$iL zg|wjxdMC2|Icc5rX*qPL(coL!u>-xxz-rFiC!6hD1IR%|HSRsV3>Kq~&vJ=s3M5y8SG%YBQ|{^l#LGlg!D?E>2yR*eV%9m$_J6VGQ~AIh&P$_aFbh zULr0Z$QE!QpkP=aAeR4ny<#3Fwyw@rZf4?Ewq`;mCVv}xaz+3ni+}a=k~P+yaWt^L z@w67!DqVf7D%7XtXX5xBW;Co|HvQ8WR1k?r2cZD%U;2$bsM%u8{JUJ5Z0k= zZJARv^vFkmWx15CB=rb=D4${+#DVqy5$C%bf`!T0+epLJLnh1jwCdb*zuCL}eEFvE z{rO1%gxg>1!W(I!owu*mJZ0@6FM(?C+d*CeceZRW_4id*D9p5nzMY&{mWqrJomjIZ z97ZNnZ3_%Hx8dn;H>p8m7F#^2;T%yZ3H;a&N7tm=Lvs&lgJLW{V1@h&6Vy~!+Ffbb zv(n3+v)_D$}dqd!2>Y2B)#<+o}LH#%ogGi2-?xRIH)1!SD)u-L65B&bsJTC=LiaF+YOCif2dUX6uAA|#+vNR z>U+KQekVGon)Yi<93(d!(yw1h3&X0N(PxN2{%vn}cnV?rYw z$N^}_o!XUB!mckL`yO1rnUaI4wrOeQ(+&k?2mi47hzxSD`N#-byqd1IhEoh!PGq>t z_MRy{5B0eKY>;Ao3z$RUU7U+i?iX^&r739F)itdrTpAi-NN0=?^m%?{A9Ly2pVv>Lqs6moTP?T2-AHqFD-o_ znVr|7OAS#AEH}h8SRPQ@NGG47dO}l=t07__+iK8nHw^(AHx&Wb<%jPc$$jl6_p(b$ z)!pi(0fQodCHfM)KMEMUR&UID>}m^(!{C^U7sBDOA)$VThRCI0_+2=( zV8mMq0R(#z;C|7$m>$>`tX+T|xGt(+Y48@ZYu#z;0pCgYgmMVbFb!$?%yhZqP_nhn zy4<#3P1oQ#2b51NU1mGnHP$cf0j-YOgAA}A$QoL6JVLcmExs(kU{4z;PBHJD%_=0F z>+sQV`mzijSIT7xn%PiDKHOujX;n|M&qr1T@rOxTdxtZ!&u&3HHFLYD5$RLQ=heur zb>+AFokUVQeJy-#LP*^)spt{mb@Mqe=A~-4p0b+Bt|pZ+@CY+%x}9f}izU5;4&QFE zO1bhg&A4uC1)Zb67kuowWY4xbo&J=%yoXlFB)&$d*-}kjBu|w!^zbD1YPc0-#XTJr z)pm2RDy%J3jlqSMq|o%xGS$bPwn4AqitC6&e?pqWcjWPt{3I{>CBy;hg0Umh#c;hU3RhCUX=8aR>rmd` z7Orw(5tcM{|-^J?ZAA9KP|)X6n9$-kvr#j5YDecTM6n z&07(nD^qb8hpF0B^z^pQ*%5ePYkv&FabrlI61ntiVp!!C8y^}|<2xgAd#FY=8b*y( zuQOuvy2`Ii^`VBNJB&R!0{hABYX55ooCAJSSevl4RPqEGb)iy_0H}v@vFwFzD%>#I>)3PsouQ+_Kkbqy*kKdHdfkN7NBcq%V{x^fSxgXpg7$bF& zj!6AQbDY(1u#1_A#1UO9AxiZaCVN2F0wGXdY*g@x$ByvUA?ePdide0dmr#}udE%K| z3*k}Vv2Ew2u1FXBaVA6aerI36R&rzEZeDDCl5!t0J=ug6kuNZzH>3i_VN`%BsaVB3 zQYw|Xub_SGf{)F{$ZX5`Jc!X!;eybjP+o$I{Z^Hsj@D=E{MnnL+TbC@HEU2DjG{3-LDGIbq()U87x4eS;JXnSh;lRlJ z>EL3D>wHt-+wTjQF$fGyDO$>d+(fq@bPpLBS~xA~R=3JPbS{tzN(u~m#Po!?H;IYv zE;?8%^vle|%#oux(Lj!YzBKv+Fd}*Ur-dCBoX*t{KeNM*n~ZPYJ4NNKkI^MFbz9!v z4(Bvm*Kc!-$%VFEewYJKz-CQN{`2}KX4*CeJEs+Q(!kI%hN1!1P6iOq?ovz}X0IOi z)YfWpwW@pK08^69#wSyCZkX9?uZD?C^@rw^Y?gLS_xmFKkooyx$*^5#cPqntNTtSG zlP>XLMj2!VF^0k#ole7`-c~*~+_T5ls?x4)ah(j8vo_ zwb%S8qoaZqY0-$ZI+ViIA_1~~rAH7K_+yFS{0rT@eQtTAdz#8E5VpwnW!zJ_^{Utv zlW5Iar3V5t&H4D6A=>?mq;G92;1cg9a2sf;gY9pJDVKn$DYdQlvfXq}zz8#LyPGq@ z+`YUMD;^-6w&r-82JL7mA8&M~Pj@aK!m{0+^v<|t%APYf7`}jGEhdYLqsHW-Le9TL 
z_hZZ1gbrz7$f9^fAzVIP30^KIz!!#+DRLL+qMszvI_BpOSmjtl$hh;&UeM{ER@INV zcI}VbiVTPoN|iSna@=7XkP&-4#06C};8ajbxJ4Gcq8(vWv4*&X8bM^T$mBk75Q92j z1v&%a;OSKc8EIrodmIiw$lOES2hzGDcjjB`kEDfJe{r}yE6`eZL zEB`9u>Cl0IsQ+t}`-cx}{6jqcANucqIB>Qmga_&<+80E2Q|VHHQ$YlAt{6`Qu`HA3 z03s0-sSlwbvgi&_R8s={6<~M^pGvBNjKOa>tWenzS8s zR>L7R5aZ=mSU{f?ib4Grx$AeFvtO5N|D>9#)ChH#Fny2maHWHOf2G=#<9Myot#+4u zWVa6d^Vseq_0=#AYS(-m$Lp;*8nC_6jXIjEM`omUmtH@QDs3|G)i4j*#_?#UYVZvJ z?YjT-?!4Q{BNun;dKBWLEw2C-VeAz`%?A>p;)PL}TAZn5j~HK>v1W&anteARlE+~+ zj>c(F;?qO3pXBb|#OZdQnm<4xWmn~;DR5SDMxt0UK_F^&eD|KZ=O;tO3vy4@4h^;2 zUL~-z`-P1aOe?|ZC1BgVsL)2^J-&vIFI%q@40w0{jjEfeVl)i9(~bt2z#2Vm)p`V_ z1;6$Ae7=YXk#=Qkd24Y23t&GvRxaOoad~NbJ+6pxqzJ>FY#Td7@`N5xp!n(c!=RE& z&<<@^a$_Ys8jqz4|5Nk#FY$~|FPC0`*a5HH!|Gssa9=~66&xG9)|=pOOJ2KE5|YrR zw!w6K2aC=J$t?L-;}5hn6mHd%hC;p8P|Dgh6D>hGnXPgi;6r+eA=?f72y9(Cf_ho{ zH6#)uD&R=73^$$NE;5piWX2bzR67fQ)`b=85o0eOLGI4c-Tb@-KNi2pz=Ke@SDcPn za$AxXib84`!Sf;Z3B@TSo`Dz7GM5Kf(@PR>Ghzi=BBxK8wRp>YQoXm+iL>H*Jo9M3 z6w&E?BC8AFTFT&Tv8zf+m9<&S&%dIaZ)Aoqkak_$r-2{$d~0g2oLETx9Y`eOAf14QXEQw3tJne;fdzl@wV#TFXSLXM2428F-Q}t+n2g%vPRMUzYPvzQ9f# zu(liiJem9P*?0%V@RwA7F53r~|I!Ty)<*AsMX3J{_4&}{6pT%Tpw>)^|DJ)>gpS~1rNEh z0$D?uO8mG?H;2BwM5a*26^7YO$XjUm40XmBsb63MoR;bJh63J;OngS5sSI+o2HA;W zdZV#8pDpC9Oez&L8loZO)MClRz!_!WD&QRtQxnazhT%Vj6Wl4G11nUk8*vSeVab@N#oJ}`KyJv+8Mo@T1-pqZ1t|?cnaVOd;1(h9 z!$DrN=jcGsVYE-0-n?oCJ^4x)F}E;UaD-LZUIzcD?W^ficqJWM%QLy6QikrM1aKZC zi{?;oKwq^Vsr|&`i{jIphA8S6G4)$KGvpULjH%9u(Dq247;R#l&I0{IhcC|oBF*Al zvLo7Xte=C{aIt*otJD}BUq)|_pdR>{zBMT< z(^1RpZv*l*m*OV^8>9&asGBo8h*_4q*)-eCv*|Pq=XNGrZE)^(SF7^{QE_~4VDB(o zVcPA_!G+2CAtLbl+`=Q~9iW`4ZRLku!uB?;tWqVjB0lEOf}2RD7dJ=BExy=<9wkb- z9&7{XFA%n#JsHYN8t5d~=T~5DcW4$B%3M+nNvC2`0!#@sckqlzo5;hhGi(D9=*A4` z5ynobawSPRtWn&CDLEs3Xf`(8^zDP=NdF~F^s&={l7(aw&EG}KWpMjtmz7j_VLO;@ zM2NVLDxZ@GIv7*gzl1 zjq78tv*8#WSY`}Su0&C;2F$Ze(q>F(@Wm^Gw!)(j;dk9Ad{STaxn)IV9FZhm*n+U} zi;4y*3v%A`_c7a__DJ8D1b@dl0Std3F||4Wtvi)fCcBRh!X9$1x!_VzUh>*S5s!oq z;qd{J_r79EL2wIeiGAqFstWtkfIJpjVh%zFo*=55B9Zq~y0=^iqHWfQl@O!Ak;(o*m!pZqe9 z%U2oDOhR)BvW8&F70L;2TpkzIutIvNQaTjjs5V#8mV4!NQ}zN=i`i@WI1z0eN-iCS z;vL-Wxc^Vc_qK<5RPh(}*8dLT{~GzE{w2o$2kMFaEl&q zP{V=>&3kW7tWaK-Exy{~`v4J0U#OZBk{a9{&)&QG18L@6=bsZ1zC_d{{pKZ-Ey>I> z;8H0t4bwyQqgu4hmO`3|4K{R*5>qnQ&gOfdy?z`XD%e5+pTDzUt3`k^u~SaL&XMe= z9*h#kT(*Q9jO#w2Hd|Mr-%DV8i_1{J1MU~XJ3!WUplhXDYBpJH><0OU`**nIvPIof z|N8@I=wA)sf45SAvx||f?Z5uB$kz1qL3Ky_{%RPdP5iN-D2!p5scq}buuC00C@jom zhfGKm3|f?Z0iQ|K$Z~!`8{nmAS1r+fp6r#YDOS8V*;K&Gs7Lc&f^$RC66O|)28oh`NHy&vq zJh+hAw8+ybTB0@VhWN^0iiTnLsCWbS_y`^gs!LX!Lw{yE``!UVzrV24tP8o;I6-65 z1MUiHw^{bB15tmrVT*7-#sj6cs~z`wk52YQJ*TG{SE;KTm#Hf#a~|<(|ImHH17nNM z`Ub{+J3dMD!)mzC8b(2tZtokKW5pAwHa?NFiso~# z1*iaNh4lQ4TS)|@G)H4dZV@l*Vd;Rw;-;odDhW2&lJ%m@jz+Panv7LQm~2Js6rOW3 z0_&2cW^b^MYW3)@o;neZ<{B4c#m48dAl$GCc=$>ErDe|?y@z`$uq3xd(%aAsX)D%l z>y*SQ%My`yDP*zof|3@_w#cjaW_YW4BdA;#Glg1RQcJGY*CJ9`H{@|D+*e~*457kd z73p<%fB^PV!Ybw@)Dr%(ZJbX}xmCStCYv#K3O32ej{$9IzM^I{6FJ8!(=azt7RWf4 z7ib0UOPqN40X!wOnFOoddd8`!_IN~9O)#HRTyjfc#&MCZ zZAMzOVB=;qwt8gV?{Y2?b=iSZG~RF~uyx18K)IDFLl})G1v@$(s{O4@RJ%OTJyF+Cpcx4jmy|F3euCnMK!P2WTDu5j z{{gD$=M*pH!GGzL%P)V2*ROm>!$Y=z|D`!_yY6e7SU$~a5q8?hZGgaYqaiLnkK%?0 zs#oI%;zOxF@g*@(V4p!$7dS1rOr6GVs6uYCTt2h)eB4?(&w8{#o)s#%gN@BBosRUe z)@P@8_Zm89pr~)b>e{tbPC~&_MR--iB{=)y;INU5#)@Gix-YpgP<-c2Ms{9zuCX|3 z!p(?VaXww&(w&uBHzoT%!A2=3HAP>SDxcljrego7rY|%hxy3XlODWffO_%g|l+7Y_ zqV(xbu)s4lV=l7M;f>vJl{`6qBm>#ZeMA}kXb97Z)?R97EkoI?x6Lp0yu1Z>PS?2{ z0QQ(8D)|lc9CO3B~e(pQM&5(1y&y=e>C^X$`)_&XuaI!IgDTVqt31wX#n+@!a_A0ZQkA zCJ2@M_4Gb5MfCrm5UPggeyh)8 zO9?`B0J#rkoCx(R0I!ko_2?iO@|oRf1;3r+i)w-2&j?=;NVIdPFsB)`|IC0zk6r9c 
zBG4FVsz_A+ibdwv>mLp>P!(t}E>$JGaK$R~;fb{O3($y1ssQQo|5M;^JqC?7qe|hg zu0ZOqeFcp?qVn&Qu7FQJ4hcFi&|nR!*j)MF#b}QO^lN%5)4p*D^H+B){n8%VPUzi! zDihoGcP71a6!ab`l^hK&*dYrVYzJ0)#}xVrp!e;lI!+x+bfCN0KXwUAPU9@#l7@0& QuEJmfE|#`Dqx|px0L@K;Y5)KL literal 0 HcmV?d00001 diff --git a/integration/flink/examples/stateful/gradle/wrapper/gradle-wrapper.properties b/integration/flink/examples/stateful/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000000..19cfad969b --- /dev/null +++ b/integration/flink/examples/stateful/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,5 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/integration/flink/examples/stateful/gradlew b/integration/flink/examples/stateful/gradlew new file mode 100755 index 0000000000..744e882ed5 --- /dev/null +++ b/integration/flink/examples/stateful/gradlew @@ -0,0 +1,185 @@ +#!/usr/bin/env sh + +# +# Copyright 2015 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MSYS* | MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." 
+ fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin or MSYS, switch paths to Windows format before running java +if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=`expr $i + 1` + done + case $i in + 0) set -- ;; + 1) set -- "$args0" ;; + 2) set -- "$args0" "$args1" ;; + 3) set -- "$args0" "$args1" "$args2" ;; + 4) set -- "$args0" "$args1" "$args2" "$args3" ;; + 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=`save "$@"` + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +exec "$JAVACMD" "$@" diff --git a/integration/flink/examples/stateful/src/main/java/io/openlineage/flink/FlinkStoppableApplication.java b/integration/flink/examples/stateful/src/main/java/io/openlineage/flink/FlinkStoppableApplication.java index f9a9e13485..50f823e0b6 100644 --- a/integration/flink/examples/stateful/src/main/java/io/openlineage/flink/FlinkStoppableApplication.java +++ b/integration/flink/examples/stateful/src/main/java/io/openlineage/flink/FlinkStoppableApplication.java @@ -29,9 
+29,9 @@ import org.apache.flink.core.execution.SavepointFormatType; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.impl.classic.HttpClientBuilder; import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse; -import org.apache.hc.client5.http.impl.classic.HttpClients; import org.apache.hc.core5.http.io.entity.EntityUtils; import org.awaitility.Awaitility; import org.slf4j.Logger; @@ -71,7 +71,7 @@ public static void main(String[] args) throws Exception { Awaitility.await().until(() -> jobClient.getJobStatus().get().equals(JobStatus.RUNNING)); // wait for some checkpoints to be written - CloseableHttpClient httpClient = HttpClients.createDefault(); + CloseableHttpClient httpClient = HttpClientBuilder.create().build(); String checkpointApiUrl = String.format( "http://%s:%s/jobs/%s/checkpoints", diff --git a/integration/flink/settings.gradle b/integration/flink/settings.gradle index fb061de765..2852057a49 100644 --- a/integration/flink/settings.gradle +++ b/integration/flink/settings.gradle @@ -5,4 +5,3 @@ include 'flink115' include 'flink117' include 'flink118' include 'app' -include 'examples:stateful' diff --git a/integration/flink/shared/build.gradle b/integration/flink/shared/build.gradle index f80e532cd8..894cb3dbb5 100644 --- a/integration/flink/shared/build.gradle +++ b/integration/flink/shared/build.gradle @@ -109,7 +109,7 @@ dependencies { annotationProcessor "org.projectlombok:lombok:${lombokVersion}" implementation 'org.javassist:javassist:3.30.2-GA' - implementation 'org.apache.httpcomponents.client5:httpclient5:5.3.1' + implementation 'org.apache.httpcomponents.client5:httpclient5:5.4.1' testImplementation "org.apache.flink:flink-java:$flinkVersion" testImplementation "org.apache.flink:flink-streaming-java:$flinkVersion" diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/ArgumentParserTest.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/ArgumentParserTest.java index a18fbf3bff..b21e50e04a 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/ArgumentParserTest.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/ArgumentParserTest.java @@ -22,7 +22,6 @@ import io.openlineage.client.transports.ConsoleConfig; import io.openlineage.client.transports.HttpConfig; import io.openlineage.client.transports.KafkaConfig; -import io.openlineage.client.transports.KinesisConfig; import io.openlineage.spark.api.SparkOpenLineageConfig; import java.util.Arrays; import java.util.Collections; @@ -70,14 +69,10 @@ void testTransportTypes() { SparkOpenLineageConfig configKafka = ArgumentParser.parse( new SparkConf().set(ArgumentParser.SPARK_CONF_TRANSPORT_TYPE, "kafka")); - SparkOpenLineageConfig configKinesis = - ArgumentParser.parse( - new SparkConf().set(ArgumentParser.SPARK_CONF_TRANSPORT_TYPE, "kinesis")); assertThat(config.getTransportConfig()).isInstanceOf(ConsoleConfig.class); assertThat(configHttp.getTransportConfig()).isInstanceOf(HttpConfig.class); assertThat(configKafka.getTransportConfig()).isInstanceOf(KafkaConfig.class); - assertThat(configKinesis.getTransportConfig()).isInstanceOf(KinesisConfig.class); } @Test @@ -146,25 +141,6 @@ void testConfToKafkaConfig() { assertEquals("test2", transportConfig.getProperties().get("test2")); } - @Test - void testConfToKinesisConfig() { - SparkConf sparkConf = 
- new SparkConf() - .set("spark.openlineage.transport.type", "kinesis") - .set("spark.openlineage.transport.streamName", "test") - .set("spark.openlineage.transport.region", "test") - .set("spark.openlineage.transport.roleArn", "test") - .set("spark.openlineage.transport.properties.test1", "test1") - .set("spark.openlineage.transport.properties.test2", "test2"); - SparkOpenLineageConfig config = ArgumentParser.parse(sparkConf); - KinesisConfig transportConfig = (KinesisConfig) config.getTransportConfig(); - assertEquals("test", transportConfig.getStreamName()); - assertEquals("test", transportConfig.getRegion()); - assertEquals("test", transportConfig.getRoleArn()); - assertEquals("test1", transportConfig.getProperties().get("test1")); - assertEquals("test2", transportConfig.getProperties().get("test2")); - } - @Test void testLoadConfigFromEnvVars() { try (MockedStatic mocked = mockStatic(Environment.class)) { diff --git a/integration/spark/build.gradle b/integration/spark/build.gradle index 5ea100bdb2..bf001bdd61 100644 --- a/integration/spark/build.gradle +++ b/integration/spark/build.gradle @@ -174,10 +174,10 @@ shadowJar { relocate "org.apache.httpcomponents.client5", "io.openlineage.spark.shaded.org.apache.httpcomponents.client5" relocate "javassist", "io.openlineage.spark.shaded.javassist" relocate "org.apache.hc", "io.openlineage.spark.shaded.org.apache.hc" + relocate "org.apache.http", "io.openlineage.spark.shaded.org.apache.http" relocate "org.apache.commons.codec", "io.openlineage.spark.shaded.org.apache.commons.codec" relocate "org.apache.commons.lang3", "io.openlineage.spark.shaded.org.apache.commons.lang3" relocate "org.apache.commons.beanutils", "io.openlineage.spark.shaded.org.apache.commons.beanutils" - relocate "org.apache.http", "io.openlineage.spark.shaded.org.apache.http" relocate 'org.yaml.snakeyaml', 'io.openlineage.spark.shaded.org.yaml.snakeyaml' relocate "com.fasterxml.jackson", "io.openlineage.spark.shaded.com.fasterxml.jackson" relocate "org.LatencyUtils", "io.openlineage.spark.shaded.org.latencyutils" diff --git a/integration/spark/buildDependencies.sh b/integration/spark/buildDependencies.sh index 729bb69205..8568011e59 100755 --- a/integration/spark/buildDependencies.sh +++ b/integration/spark/buildDependencies.sh @@ -1,5 +1,5 @@ #!/bin/bash set -x -e -(cd ../../client/java && ./gradlew publishToMavenLocal) -(cd ../spark-extension-interfaces && ./gradlew publishToMavenLocal) +(cd ../../client/java && ./gradlew clean publishToMavenLocal) +(cd ../spark-extension-interfaces && ./gradlew clean publishToMavenLocal) (cd ../sql/iface-java && ./script/compile.sh && ./script/build.sh) diff --git a/website/docs/client/java/partials/java_transport.md b/website/docs/client/java/partials/java_transport.md index 3e0c5a8f99..d3c32ee1dc 100644 --- a/website/docs/client/java/partials/java_transport.md +++ b/website/docs/client/java/partials/java_transport.md @@ -344,94 +344,6 @@ Default values are: - `job:{job.namespace}/{job.name}` - for JobEvent - `dataset:{dataset.namespace}/{dataset.name}` - for DatasetEvent -### [Kinesis](https://github.com/OpenLineage/OpenLineage/blob/main/client/java/src/main/java/io/openlineage/client/transports/KinesisTransport.java) - -If a transport type is set to `kinesis`, then the below parameters would be read and used when building KinesisProducer. -Also, KinesisTransport depends on you to provide artifact `com.amazonaws:amazon-kinesis-producer:0.14.0` or compatible on your classpath. 
- -#### Configuration - -- `type` - string, must be `"kinesis"`. Required. -- `streamName` - the streamName of the Kinesis. Required. -- `region` - the region of the Kinesis. Required. -- `roleArn` - the roleArn which is allowed to read/write to Kinesis stream. Optional. -- `properties` - a dictionary that contains a [Kinesis allowed properties](https://github.com/awslabs/amazon-kinesis-producer/blob/master/java/amazon-kinesis-producer-sample/default_config.properties). Optional. - -#### Behavior - -- Events are serialized to JSON, and then dispatched to the Kinesis stream. -- The partition key is generated as `{jobNamespace}:{jobName}`. -- Two constructors are available: one accepting both `KinesisProducer` and `KinesisConfig` and another solely accepting `KinesisConfig`. - -#### Examples - - - - -```yaml -transport: - type: kinesis - streamName: your_kinesis_stream_name - region: your_aws_region - roleArn: arn:aws:iam::account-id:role/role-name - properties: - VerifyCertificate: true - ConnectTimeout: 6000 -``` - - - - -```ini -spark.openlineage.transport.type=kinesis -spark.openlineage.transport.streamName=your_kinesis_stream_name -spark.openlineage.transport.region=your_aws_region -spark.openlineage.transport.roleArn=arn:aws:iam::account-id:role/role-name -spark.openlineage.transport.properties.VerifyCertificate=true -spark.openlineage.transport.properties.ConnectTimeout=6000 -``` - - - - -```ini -openlineage.transport.type=kinesis -openlineage.transport.streamName=your_kinesis_stream_name -openlineage.transport.region=your_aws_region -openlineage.transport.roleArn=arn:aws:iam::account-id:role/role-name -openlineage.transport.properties.VerifyCertificate=true -openlineage.transport.properties.ConnectTimeout=6000 -``` - - - - -```java -import java.util.Properties; - -import io.openlineage.client.OpenLineageClient; -import io.openlineage.client.transports.KinesisConfig; -import io.openlineage.client.transports.KinesisTransport; - -Properties kinesisProperties = new Properties(); -kinesisProperties.setProperty("property_name_1", "value_1"); -kinesisProperties.setProperty("property_name_2", "value_2"); - -KinesisConfig kinesisConfig = new KinesisConfig(); -kinesisConfig.setStreamName("your_kinesis_stream_name"); -kinesisConfig.setRegion("your_aws_region"); -kinesisConfig.setRoleArn("arn:aws:iam::account-id:role/role-name"); -kinesisConfig.setProperties(kinesisProperties); - -OpenLineageClient client = OpenLineageClient.builder() - .transport( - new KinesisTransport(httpConfig)) - .build(); -``` - - - - ### [Console](https://github.com/OpenLineage/OpenLineage/tree/main/client/java/src/main/java/io/openlineage/client/transports/ConsoleTransport.java) This straightforward transport emits OpenLineage events directly to the console through a logger. 
From bf4aaf01e91d2d6259c48dbb3d8f1c0b1c75b779 Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Tue, 5 Nov 2024 22:25:46 +0100 Subject: [PATCH 50/89] update changelog for release 1.24.0 (#3224) Signed-off-by: Maciej Obuchowski --- CHANGELOG.md | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e3f0b903e0..9cbd87e028 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,31 @@ # Changelog -## [Unreleased](https://github.com/OpenLineage/OpenLineage/compare/1.22.0...HEAD) +## [Unreleased](https://github.com/OpenLineage/OpenLineage/compare/1.24.0...HEAD) + +## [1.24.0](https://github.com/OpenLineage/OpenLineage/compare/1.23.0...1.24.0) - 2024-11-05 + +### Added +* **Spark: Add Dataproc run facet to include jobType property** [`#3167`](https://github.com/OpenLineage/OpenLineage/pull/3167) [@codelixir](https://github.com/codelixir) + *Updates the GCP Dataproc run facet to include jobType property* +* **Add EnvironmentVariablesRunFacet to core spec** [`#3186`](https://github.com/OpenLineage/OpenLineage/pull/3186) [@JDarDagran](https://github.com/JDarDagran) + *Use EnvironmentVariablesRunFacet in Python client* +* **Add assertions for format in test events** [`#3221`](https://github.com/OpenLineage/OpenLineage/pull/3221) [@JDarDagran](https://github.com/JDarDagran) +* **Spark: Add integration tests for EMR** [`#3142`](https://github.com/OpenLineage/OpenLineage/pull/3142) [@arturowczarek](https://github.com/arturowczarek) + *Spark integration has integration tests for EMR* + +### Changed +* **Move Kinesis to separate module, migrate HTTP transport to httpclient5** [`#3205`](https://github.com/OpenLineage/OpenLineage/pull/3205) [@mobuchowski](https://github.com/mobuchowski) + *Moves Kinesis integration to a separate module and updates HTTP transport to use HttpClient 5.x* +* **Docs: Upgrade docusaurus to 3.6** [`#3219`](https://github.com/OpenLineage/OpenLineage/pull/3219) [@arturowczarek](https://github.com/arturowczarek) +* **Spark: Limit the Seq size in RddPathUtils::extract()** [`#3148`](https://github.com/OpenLineage/OpenLineage/pull/3148) [@codelixir](https://github.com/codelixir) + *Adds flag to limit the logs in RddPathUtils::extract() to avoid OutOfMemoryError for large jobs* + +### Fixed +* **Docs: Fix outdated Spark-related docs** [`#3215`](https://github.com/OpenLineage/OpenLineage/pull/3215) [@mobuchowski](https://github.com/mobuchowski) +* **Fix docusaurus-mdx-checker errors** [`#3217`](https://github.com/OpenLineage/OpenLineage/pull/3217) [@arturowczarek](https://github.com/arturowczarek) +* **[Integration/dbt] Parse dbt source tests** [`#3208`](https://github.com/OpenLineage/OpenLineage/pull/3208) [@MassyB](https://github.com/MassyB) + *Fix: Consider dbt sources when looking for test results* +* **Avoid tests in configurable test** [`#3141`](https://github.com/OpenLineage/OpenLineage/pull/3141) [@pawel-leszczynski](https://github.com/pawel-leszczynski) ## [1.23.0](https://github.com/OpenLineage/OpenLineage/compare/1.22.0...1.23.0) - 2024-10-04 From 57021e51dd25932a6ee2d3d919a379311aae9933 Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Tue, 5 Nov 2024 22:26:01 +0100 Subject: [PATCH 51/89] Prepare for release 1.24.0 Signed-off-by: Maciej Obuchowski --- client/java/gradle.properties | 2 +- integration/flink/README.md | 2 +- integration/flink/examples/stateful/gradle.properties | 2 +- integration/flink/gradle.properties | 2 +- integration/spark-extension-interfaces/gradle.properties | 2 +- 
integration/spark/gradle.properties | 2 +- integration/sql/iface-java/gradle.properties | 2 +- proxy/backend/gradle.properties | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/client/java/gradle.properties b/client/java/gradle.properties index ab23c07812..a24eb72431 100644 --- a/client/java/gradle.properties +++ b/client/java/gradle.properties @@ -1,3 +1,3 @@ -version=1.24.0-SNAPSHOT +version=1.24.0 org.gradle.caching=true org.gradle.jvmargs=-Xmx4096M \ No newline at end of file diff --git a/integration/flink/README.md b/integration/flink/README.md index 49481f0e5f..7f1ce54eff 100644 --- a/integration/flink/README.md +++ b/integration/flink/README.md @@ -14,7 +14,7 @@ Maven: io.openlineage openlineage-flink - 1.23.0 + 1.24.0 ``` diff --git a/integration/flink/examples/stateful/gradle.properties b/integration/flink/examples/stateful/gradle.properties index e28bc49492..758715eee1 100644 --- a/integration/flink/examples/stateful/gradle.properties +++ b/integration/flink/examples/stateful/gradle.properties @@ -1,3 +1,3 @@ -version=1.24.0-SNAPSHOT +version=1.24.0 flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G \ No newline at end of file diff --git a/integration/flink/gradle.properties b/integration/flink/gradle.properties index f063b71cf3..d7e7ae1ea0 100644 --- a/integration/flink/gradle.properties +++ b/integration/flink/gradle.properties @@ -1,5 +1,5 @@ jdk8.build=true -version=1.24.0-SNAPSHOT +version=1.24.0 flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G diff --git a/integration/spark-extension-interfaces/gradle.properties b/integration/spark-extension-interfaces/gradle.properties index 6c90bf4b76..6add5a9127 100644 --- a/integration/spark-extension-interfaces/gradle.properties +++ b/integration/spark-extension-interfaces/gradle.properties @@ -1 +1 @@ -version=1.24.0-SNAPSHOT \ No newline at end of file +version=1.24.0 \ No newline at end of file diff --git a/integration/spark/gradle.properties b/integration/spark/gradle.properties index 68b7e0a957..29323ab768 100644 --- a/integration/spark/gradle.properties +++ b/integration/spark/gradle.properties @@ -1,4 +1,4 @@ -version=1.24.0-SNAPSHOT +version=1.24.0 org.gradle.jvmargs=-Xmx4G spark.version=3.3.4 diff --git a/integration/sql/iface-java/gradle.properties b/integration/sql/iface-java/gradle.properties index a93f661875..0ea312a064 100644 --- a/integration/sql/iface-java/gradle.properties +++ b/integration/sql/iface-java/gradle.properties @@ -1 +1 @@ -version=1.24.0-SNAPSHOT +version=1.24.0 diff --git a/proxy/backend/gradle.properties b/proxy/backend/gradle.properties index a93f661875..0ea312a064 100644 --- a/proxy/backend/gradle.properties +++ b/proxy/backend/gradle.properties @@ -1 +1 @@ -version=1.24.0-SNAPSHOT +version=1.24.0 From fcc44642a0ca92cfb7dee545bfca6fcabcfd565b Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Tue, 5 Nov 2024 22:26:05 +0100 Subject: [PATCH 52/89] Prepare next development version 1.25.0-SNAPSHOT Signed-off-by: Maciej Obuchowski --- client/java/gradle.properties | 2 +- client/python/openlineage/client/constants.py | 2 +- client/python/pyproject.toml | 2 +- client/python/setup.cfg | 2 +- integration/airflow/openlineage/airflow/version.py | 2 +- integration/airflow/setup.cfg | 2 +- integration/airflow/setup.py | 2 +- integration/common/openlineage/common/__init__.py | 2 +- integration/common/setup.cfg | 2 +- integration/common/setup.py | 2 +- integration/dagster/openlineage/dagster/__init__.py | 2 +- integration/dagster/setup.cfg | 2 +- integration/dagster/setup.py | 2 +- 
integration/dbt/scripts/dbt-ol | 2 +- integration/dbt/setup.cfg | 2 +- integration/dbt/setup.py | 2 +- .../resources/io/openlineage/flink/client/version.properties | 2 +- integration/flink/examples/stateful/gradle.properties | 2 +- integration/flink/gradle.properties | 2 +- .../resources/io/openlineage/flink/client/version.properties | 2 +- integration/spark-extension-interfaces/gradle.properties | 2 +- .../spark/shade/extension/v1/lifecycle/plan/version.properties | 2 +- integration/spark/gradle.properties | 2 +- .../resources/io/openlineage/spark/agent/version.properties | 2 +- .../resources/io/openlineage/spark/agent/version.properties | 2 +- integration/sql/.bumpversion.cfg | 2 +- integration/sql/iface-java/Cargo.toml | 2 +- integration/sql/iface-java/gradle.properties | 2 +- integration/sql/iface-py/Cargo.toml | 2 +- integration/sql/impl/Cargo.toml | 2 +- proxy/backend/gradle.properties | 2 +- 31 files changed, 31 insertions(+), 31 deletions(-) diff --git a/client/java/gradle.properties b/client/java/gradle.properties index a24eb72431..95d2528b0b 100644 --- a/client/java/gradle.properties +++ b/client/java/gradle.properties @@ -1,3 +1,3 @@ -version=1.24.0 +version=1.25.0-SNAPSHOT org.gradle.caching=true org.gradle.jvmargs=-Xmx4096M \ No newline at end of file diff --git a/client/python/openlineage/client/constants.py b/client/python/openlineage/client/constants.py index 9cd1ab461b..f8c3268e05 100644 --- a/client/python/openlineage/client/constants.py +++ b/client/python/openlineage/client/constants.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 from __future__ import annotations -__version__ = "1.24.0" +__version__ = "1.25.0" DEFAULT_TIMEOUT_MS = 5000 DEFAULT_NAMESPACE_NAME = "default" diff --git a/client/python/pyproject.toml b/client/python/pyproject.toml index 0a696a004f..dd37d8f32a 100644 --- a/client/python/pyproject.toml +++ b/client/python/pyproject.toml @@ -6,7 +6,7 @@ requires = [ [project] name = "openlineage-python" -version = "1.24.0" +version = "1.25.0" description = "OpenLineage Python Client" readme = "README.md" keywords = [ diff --git a/client/python/setup.cfg b/client/python/setup.cfg index 658f7864db..475504cb6a 100644 --- a/client/python/setup.cfg +++ b/client/python/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.0 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/airflow/openlineage/airflow/version.py b/integration/airflow/openlineage/airflow/version.py index ba93b9775b..456bbbc940 100644 --- a/integration/airflow/openlineage/airflow/version.py +++ b/integration/airflow/openlineage/airflow/version.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.24.0" +__version__ = "1.25.0" diff --git a/integration/airflow/setup.cfg b/integration/airflow/setup.cfg index f7e6f86636..f89379c072 100644 --- a/integration/airflow/setup.cfg +++ b/integration/airflow/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.0 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/airflow/setup.py b/integration/airflow/setup.py index a66254b619..3ea320f109 100644 --- a/integration/airflow/setup.py +++ b/integration/airflow/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.24.0" +__version__ = "1.25.0" requirements = [ "attrs>=20.0", diff --git 
a/integration/common/openlineage/common/__init__.py b/integration/common/openlineage/common/__init__.py index ba93b9775b..456bbbc940 100644 --- a/integration/common/openlineage/common/__init__.py +++ b/integration/common/openlineage/common/__init__.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.24.0" +__version__ = "1.25.0" diff --git a/integration/common/setup.cfg b/integration/common/setup.cfg index 26079ca734..c32d2e69d9 100644 --- a/integration/common/setup.cfg +++ b/integration/common/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.0 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/common/setup.py b/integration/common/setup.py index 4ca12e86b2..97eb781747 100644 --- a/integration/common/setup.py +++ b/integration/common/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.24.0" +__version__ = "1.25.0" project_urls = { diff --git a/integration/dagster/openlineage/dagster/__init__.py b/integration/dagster/openlineage/dagster/__init__.py index ba93b9775b..456bbbc940 100644 --- a/integration/dagster/openlineage/dagster/__init__.py +++ b/integration/dagster/openlineage/dagster/__init__.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.24.0" +__version__ = "1.25.0" diff --git a/integration/dagster/setup.cfg b/integration/dagster/setup.cfg index 8e6b7c08b8..73aef8a906 100644 --- a/integration/dagster/setup.cfg +++ b/integration/dagster/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.0 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/dagster/setup.py b/integration/dagster/setup.py index 4f1248a385..31928fe033 100644 --- a/integration/dagster/setup.py +++ b/integration/dagster/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.24.0" +__version__ = "1.25.0" DAGSTER_VERSION = "1.0.0" diff --git a/integration/dbt/scripts/dbt-ol b/integration/dbt/scripts/dbt-ol index 71ea95e3e6..6b5bb9e0d4 100755 --- a/integration/dbt/scripts/dbt-ol +++ b/integration/dbt/scripts/dbt-ol @@ -23,7 +23,7 @@ from openlineage.common.provider.dbt import ( from openlineage.common.utils import parse_multiple_args, parse_single_arg from tqdm import tqdm -__version__ = "1.24.0" +__version__ = "1.25.0" PRODUCER = f"https://github.com/OpenLineage/OpenLineage/tree/{__version__}/integration/dbt" diff --git a/integration/dbt/setup.cfg b/integration/dbt/setup.cfg index 38e98d41a2..bec3856aec 100644 --- a/integration/dbt/setup.cfg +++ b/integration/dbt/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.0 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/dbt/setup.py b/integration/dbt/setup.py index 02ff2d23e0..26ee4da301 100644 --- a/integration/dbt/setup.py +++ b/integration/dbt/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.24.0" +__version__ = "1.25.0" requirements = [ "tqdm>=4.62.0", diff --git a/integration/flink/app/src/test/resources/io/openlineage/flink/client/version.properties b/integration/flink/app/src/test/resources/io/openlineage/flink/client/version.properties index d91a6c7e15..4b0e29652f 100644 --- 
a/integration/flink/app/src/test/resources/io/openlineage/flink/client/version.properties +++ b/integration/flink/app/src/test/resources/io/openlineage/flink/client/version.properties @@ -1 +1 @@ -version 1.24.0-SNAPSHOT +version 1.25.0-SNAPSHOT diff --git a/integration/flink/examples/stateful/gradle.properties b/integration/flink/examples/stateful/gradle.properties index 758715eee1..9e063ce8f5 100644 --- a/integration/flink/examples/stateful/gradle.properties +++ b/integration/flink/examples/stateful/gradle.properties @@ -1,3 +1,3 @@ -version=1.24.0 +version=1.25.0-SNAPSHOT flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G \ No newline at end of file diff --git a/integration/flink/gradle.properties b/integration/flink/gradle.properties index d7e7ae1ea0..c16abd53be 100644 --- a/integration/flink/gradle.properties +++ b/integration/flink/gradle.properties @@ -1,5 +1,5 @@ jdk8.build=true -version=1.24.0 +version=1.25.0-SNAPSHOT flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G diff --git a/integration/flink/shared/src/test/resources/io/openlineage/flink/client/version.properties b/integration/flink/shared/src/test/resources/io/openlineage/flink/client/version.properties index d91a6c7e15..4b0e29652f 100644 --- a/integration/flink/shared/src/test/resources/io/openlineage/flink/client/version.properties +++ b/integration/flink/shared/src/test/resources/io/openlineage/flink/client/version.properties @@ -1 +1 @@ -version 1.24.0-SNAPSHOT +version 1.25.0-SNAPSHOT diff --git a/integration/spark-extension-interfaces/gradle.properties b/integration/spark-extension-interfaces/gradle.properties index 6add5a9127..b986211c1f 100644 --- a/integration/spark-extension-interfaces/gradle.properties +++ b/integration/spark-extension-interfaces/gradle.properties @@ -1 +1 @@ -version=1.24.0 \ No newline at end of file +version=1.25.0-SNAPSHOT \ No newline at end of file diff --git a/integration/spark-extension-interfaces/src/test/resources/io/openlineage/spark/shade/extension/v1/lifecycle/plan/version.properties b/integration/spark-extension-interfaces/src/test/resources/io/openlineage/spark/shade/extension/v1/lifecycle/plan/version.properties index d91a6c7e15..4b0e29652f 100644 --- a/integration/spark-extension-interfaces/src/test/resources/io/openlineage/spark/shade/extension/v1/lifecycle/plan/version.properties +++ b/integration/spark-extension-interfaces/src/test/resources/io/openlineage/spark/shade/extension/v1/lifecycle/plan/version.properties @@ -1 +1 @@ -version 1.24.0-SNAPSHOT +version 1.25.0-SNAPSHOT diff --git a/integration/spark/gradle.properties b/integration/spark/gradle.properties index 29323ab768..6a310fb67b 100644 --- a/integration/spark/gradle.properties +++ b/integration/spark/gradle.properties @@ -1,4 +1,4 @@ -version=1.24.0 +version=1.25.0-SNAPSHOT org.gradle.jvmargs=-Xmx4G spark.version=3.3.4 diff --git a/integration/spark/spark2/src/test/resources/io/openlineage/spark/agent/version.properties b/integration/spark/spark2/src/test/resources/io/openlineage/spark/agent/version.properties index d91a6c7e15..4b0e29652f 100644 --- a/integration/spark/spark2/src/test/resources/io/openlineage/spark/agent/version.properties +++ b/integration/spark/spark2/src/test/resources/io/openlineage/spark/agent/version.properties @@ -1 +1 @@ -version 1.24.0-SNAPSHOT +version 1.25.0-SNAPSHOT diff --git a/integration/spark/spark3/src/test/resources/io/openlineage/spark/agent/version.properties b/integration/spark/spark3/src/test/resources/io/openlineage/spark/agent/version.properties index d91a6c7e15..4b0e29652f 100644 
--- a/integration/spark/spark3/src/test/resources/io/openlineage/spark/agent/version.properties +++ b/integration/spark/spark3/src/test/resources/io/openlineage/spark/agent/version.properties @@ -1 +1 @@ -version 1.24.0-SNAPSHOT +version 1.25.0-SNAPSHOT diff --git a/integration/sql/.bumpversion.cfg b/integration/sql/.bumpversion.cfg index 87a7dae60b..87a8f7cd08 100644 --- a/integration/sql/.bumpversion.cfg +++ b/integration/sql/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.0 +current_version = 1.25.0 [bumpversion:file:iface-py/Cargo.toml] search = version = "{current_version}" diff --git a/integration/sql/iface-java/Cargo.toml b/integration/sql/iface-java/Cargo.toml index 7a60206e75..a8730af0fb 100644 --- a/integration/sql/iface-java/Cargo.toml +++ b/integration/sql/iface-java/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql_java" description = "Java interface for the Rust OpenLineage lineage extraction library" -version = "1.24.0" +version = "1.25.0" edition = "2021" authors = ["Piotr Wojtczak ", "Maciej Obuchowski "] keywords = ["sql", "lineage", "openlineage", "java", "jni"] diff --git a/integration/sql/iface-java/gradle.properties b/integration/sql/iface-java/gradle.properties index 0ea312a064..808b8dbffb 100644 --- a/integration/sql/iface-java/gradle.properties +++ b/integration/sql/iface-java/gradle.properties @@ -1 +1 @@ -version=1.24.0 +version=1.25.0-SNAPSHOT diff --git a/integration/sql/iface-py/Cargo.toml b/integration/sql/iface-py/Cargo.toml index bc1b47d37a..6566782c6c 100644 --- a/integration/sql/iface-py/Cargo.toml +++ b/integration/sql/iface-py/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql_python" description = "Python interface for the Rust OpenLineage lineage extraction library" -version = "1.24.0" +version = "1.25.0" edition = "2021" authors = ["Maciej Obuchowski "] keywords = ["sql", "lineage", "openlineage", "python"] diff --git a/integration/sql/impl/Cargo.toml b/integration/sql/impl/Cargo.toml index cb92237121..d60617aabe 100644 --- a/integration/sql/impl/Cargo.toml +++ b/integration/sql/impl/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql" description = "Library extracting source and destination tables from sql statements" -version = "1.24.0" +version = "1.25.0" edition = "2021" authors = ["Maciej Obuchowski ", "Piotr Wojtczak Date: Wed, 6 Nov 2024 11:33:41 +0100 Subject: [PATCH 53/89] add build context to flink integration test (#3225) Signed-off-by: Maciej Obuchowski --- .circleci/workflows/openlineage-flink.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/workflows/openlineage-flink.yml b/.circleci/workflows/openlineage-flink.yml index 8a7be50d9d..cd8975a8c4 100644 --- a/.circleci/workflows/openlineage-flink.yml +++ b/.circleci/workflows/openlineage-flink.yml @@ -23,6 +23,7 @@ workflows: filters: tags: only: /^[0-9]+(\.[0-9]+){2}(-rc\.[0-9]+)?$/ + context: << pipeline.parameters.build-context >> matrix: parameters: flink-version: [ '1.15.4', '1.16.2', '1.17.1', '1.18.1', '1.19.0' ] From 159e57181130108713d86b742e2f3aa89f987e69 Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Wed, 6 Nov 2024 11:35:51 +0100 Subject: [PATCH 54/89] docs: Inject OpenLineage version in documentation (#3223) Signed-off-by: Artur Owczarek --- website/blog/openlineage-spark/index.mdx | 4 +- website/docs/client/java/java.md | 4 +- .../docs/client/java/partials/s3_transport.md | 2 +- website/docs/guides/spark.md | 4 +- .../docs/integrations/spark/installation.md | 18 +- 
.../spark/quickstart/quickstart_local.md | 2 +- website/docs/spec/producers.md | 2 +- website/docusaurus.config.js | 274 ++++++++++-------- 8 files changed, 166 insertions(+), 144 deletions(-) diff --git a/website/blog/openlineage-spark/index.mdx b/website/blog/openlineage-spark/index.mdx index ee121b4d8f..b46eb60572 100644 --- a/website/blog/openlineage-spark/index.mdx +++ b/website/blog/openlineage-spark/index.mdx @@ -51,7 +51,7 @@ familiar with it and how it's used in Spark applications. OpenLineage integrates interface and collecting information about jobs that are executed inside a Spark application. To activate the listener, add the following properties to your Spark configuration: ``` -spark.jars.packages io.openlineage:openlineage-spark:1.23.0 +spark.jars.packages io.openlineage:openlineage-spark:{{PREPROCESSOR:OPENLINEAGE_VERSION}} spark.extraListeners io.openlineage.spark.agent.OpenLineageSparkListener ``` This can be added to your cluster’s `spark-defaults.conf` file, in which case it will record lineage for every job executed on the cluster, or added to specific jobs on submission via the `spark-submit` command. Once the listener is activated, it needs to know where to report lineage events, as well as the namespace of your jobs. Add the following additional configuration lines to your `spark-defaults.conf` file or your Spark submission script: @@ -122,7 +122,7 @@ spark = (SparkSession.builder.master('local').appName('openlineage_spark_test') .config('spark.jars', ",".join(files)) # Install and set up the OpenLineage listener - .config('spark.jars.packages', 'io.openlineage:openlineage-spark:1.23.0) + .config('spark.jars.packages', 'io.openlineage:openlineage-spark:{{PREPROCESSOR:OPENLINEAGE_VERSION}}) .config('spark.extraListeners', 'io.openlineage.spark.agent.OpenLineageSparkListener') .config('spark.openlineage.transport.url', 'http://marquez-api:5000') .config('spark.openlineage.transport.type', 'http') diff --git a/website/docs/client/java/java.md b/website/docs/client/java/java.md index 3792e0bd64..cff5f991e0 100644 --- a/website/docs/client/java/java.md +++ b/website/docs/client/java/java.md @@ -24,14 +24,14 @@ Maven: io.openlineage openlineage-java - ${OPENLINEAGE_VERSION} + {{PREPROCESSOR:OPENLINEAGE_VERSION}} ``` or Gradle: ```groovy -implementation("io.openlineage:openlineage-java:${OPENLINEAGE_VERSION}") +implementation("io.openlineage:openlineage-java:{{PREPROCESSOR:OPENLINEAGE_VERSION}}") ``` For more information on the available versions of the `openlineage-java`, diff --git a/website/docs/client/java/partials/s3_transport.md b/website/docs/client/java/partials/s3_transport.md index c35ab00b17..99e6a34d91 100644 --- a/website/docs/client/java/partials/s3_transport.md +++ b/website/docs/client/java/partials/s3_transport.md @@ -15,7 +15,7 @@ to be emitted correctly. io.openlineage transports-s3 - YOUR_VERSION_HERE + {{PREPROCESSOR:OPENLINEAGE_VERSION}} ``` diff --git a/website/docs/guides/spark.md b/website/docs/guides/spark.md index 261fc5fb10..387e0884fd 100644 --- a/website/docs/guides/spark.md +++ b/website/docs/guides/spark.md @@ -13,7 +13,7 @@ This guide was developed using an **earlier version** of this integration and ma Adding OpenLineage to Spark is refreshingly uncomplicated, and this is thanks to Spark's SparkListener interface. OpenLineage integrates with Spark by implementing SparkListener and collecting information about jobs executed inside a Spark application. 
To activate the listener, add the following properties to your Spark configuration in your cluster's `spark-defaults.conf` file or, alternatively, add them to specific jobs on submission via the `spark-submit` command: ``` -spark.jars.packages io.openlineage:openlineage-spark:1.23.0 +spark.jars.packages io.openlineage:openlineage-spark:{{PREPROCESSOR:OPENLINEAGE_VERSION}} spark.extraListeners io.openlineage.spark.agent.OpenLineageSparkListener ``` @@ -91,7 +91,7 @@ spark = (SparkSession.builder.master('local').appName('openlineage_spark_test') .config('spark.jars', ",".join(files)) # Install and set up the OpenLineage listener - .config('spark.jars.packages', 'io.openlineage:openlineage-spark:1.23.0') + .config('spark.jars.packages', 'io.openlineage:openlineage-spark:{{PREPROCESSOR:OPENLINEAGE_VERSION}}') .config('spark.extraListeners', 'io.openlineage.spark.agent.OpenLineageSparkListener') .config('spark.openlineage.transport.url', 'http://marquez-api:5000') .config('spark.openlineage.transport.type', 'http') diff --git a/website/docs/integrations/spark/installation.md b/website/docs/integrations/spark/installation.md index 4b252ec377..bf187f0f5a 100644 --- a/website/docs/integrations/spark/installation.md +++ b/website/docs/integrations/spark/installation.md @@ -13,7 +13,7 @@ import TabItem from '@theme/TabItem'; The above necessitates a change in the artifact identifier for `io.openlineage:openlineage-spark`. After version `1.8.0`, the artifact identifier has been updated. For subsequent versions, utilize: -`io.openlineage:openlineage-spark_${SCALA_BINARY_VERSION}:${OPENLINEAGE_SPARK_VERSION}`. +`io.openlineage:openlineage-spark_${SCALA_BINARY_VERSION}:{{PREPROCESSOR:OPENLINEAGE_VERSION}}`. ::: To integrate OpenLineage Spark with your application, you can: @@ -39,7 +39,7 @@ For Maven, add the following to your `pom.xml`: io.openlineage openlineage-spark_${SCALA_BINARY_VERSION} - ${OPENLINEAGE_SPARK_VERSION} + {{PREPROCESSOR:OPENLINEAGE_VERSION}} ``` @@ -63,14 +63,14 @@ For Gradle, add this to your `build.gradle`: ```groovy -implementation("io.openlineage:openlineage-spark_${SCALA_BINARY_VERSION}:${OPENLINEAGE_SPARK_VERSION}") +implementation("io.openlineage:openlineage-spark_${SCALA_BINARY_VERSION}:{{PREPROCESSOR:OPENLINEAGE_VERSION}}") ``` ```groovy -implementation("io.openlineage:openlineage-spark:${OPENLINEAGE_SPARK_VERSION}") +implementation("io.openlineage:openlineage-spark:{{PREPROCESSOR:OPENLINEAGE_VERSION}}") ``` @@ -100,7 +100,7 @@ if [ -z "$SPARK_HOME" ]; then exit 1 fi -OPENLINEAGE_SPARK_VERSION='1.9.0' # Example version +OPENLINEAGE_SPARK_VERSION='{{PREPROCESSOR:OPENLINEAGE_VERSION}}' SCALA_BINARY_VERSION='2.13' # Example Scala version ARTIFACT_ID="openlineage-spark_${SCALA_BINARY_VERSION}" JAR_NAME="${ARTIFACT_ID}-${OPENLINEAGE_SPARK_VERSION}.jar" @@ -172,7 +172,7 @@ This script demonstrate this process: ```bash #!/usr/bin/env bash -OPENLINEAGE_SPARK_VERSION='1.9.0' # Example version +OPENLINEAGE_SPARK_VERSION='{{PREPROCESSOR:OPENLINEAGE_VERSION}}' SCALA_BINARY_VERSION='2.13' # Example Scala version ARTIFACT_ID="openlineage-spark_${SCALA_BINARY_VERSION}" JAR_NAME="${ARTIFACT_ID}-${OPENLINEAGE_SPARK_VERSION}.jar" @@ -237,10 +237,10 @@ during runtime and adds it to the classpath of your Spark application. 
```bash -OPENLINEAGE_SPARK_VERSION='1.9.0' # Example version +OPENLINEAGE_SPARK_VERSION='{{PREPROCESSOR:OPENLINEAGE_VERSION}}' SCALA_BINARY_VERSION='2.13' # Example Scala version -spark-submit --packages "io.openlineage:openlineage-spark_${SCALA_BINARY_VERSION}:${OPENLINEAGE_SPARK_VERSION}" \ +spark-submit --packages "io.openlineage:openlineage-spark_${SCALA_BINARY_VERSION}:{{PREPROCESSOR:OPENLINEAGE_VERSION}}" \ # ... other options ``` @@ -250,7 +250,7 @@ spark-submit --packages "io.openlineage:openlineage-spark_${SCALA_BINARY_VERSION ```bash OPENLINEAGE_SPARK_VERSION='1.8.0' # Example version -spark-submit --packages "io.openlineage:openlineage-spark::${OPENLINEAGE_SPARK_VERSION}" \ +spark-submit --packages "io.openlineage:openlineage-spark::{{PREPROCESSOR:OPENLINEAGE_VERSION}}" \ # ... other options ``` diff --git a/website/docs/integrations/spark/quickstart/quickstart_local.md b/website/docs/integrations/spark/quickstart/quickstart_local.md index 95e6a8583a..5cd41de125 100644 --- a/website/docs/integrations/spark/quickstart/quickstart_local.md +++ b/website/docs/integrations/spark/quickstart/quickstart_local.md @@ -44,7 +44,7 @@ from pyspark.sql import SparkSession spark = (SparkSession.builder.master('local') .appName('sample_spark') .config('spark.extraListeners', 'io.openlineage.spark.agent.OpenLineageSparkListener') - .config('spark.jars.packages', 'io.openlineage:openlineage-spark:1.7.0') + .config('spark.jars.packages', 'io.openlineage:openlineage-spark:{{PREPROCESSOR:OPENLINEAGE_VERSION}}') .config('spark.openlineage.transport.type', 'console') .getOrCreate()) ``` diff --git a/website/docs/spec/producers.md b/website/docs/spec/producers.md index 1e004585db..9e9f64f9bf 100644 --- a/website/docs/spec/producers.md +++ b/website/docs/spec/producers.md @@ -10,4 +10,4 @@ This page could use some extra detail! You're welcome to contribute using the Ed The `_producer` value is included in an OpenLineage request as a way to know how the metadata was generated. It is a URI that links to a source code SHA or the location where a package can be found. -For example, this field is populated by many of the common integrations. For example, the dbt integration will set this value to `https://github.com/OpenLineage/OpenLineage/tree/{__version__}/integration/dbt` and the Python client will set it to `https://github.com/OpenLineage/OpenLineage/tree/{__version__}/client/python`. \ No newline at end of file +For example, this field is populated by many of the common integrations. For example, the dbt integration will set this value to `https://github.com/OpenLineage/OpenLineage/tree/{{PREPROCESSOR:OPENLINEAGE_VERSION}}/integration/dbt` and the Python client will set it to `https://github.com/OpenLineage/OpenLineage/tree/{{PREPROCESSOR:OPENLINEAGE_VERSION}}/client/python`. \ No newline at end of file diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index d0db47d033..437bc951d9 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -2,151 +2,173 @@ // Note: type annotations allow type checking and IDEs autocompletion const prism = require('prism-react-renderer'); +const path = require('path'); +const fs = require('fs'); + +const getCurrentVersion = () => { + // The file is created when "docusaurus docs:version " is executed. + const versionsPath = path.resolve(__dirname, 'versions.json'); + if (fs.existsSync(versionsPath)) { + return require(versionsPath)[0]; + } else { + console.warn("The documentation is not versioned. 
Run \"docusaurus docs:version \"") + return ""; + } +} + +// Replaces every occurrence of "{{PREPROCESSOR:OPENLINEAGE_VERSION}}" with the current version of the documentation. +const openLineageVersionProvider = ({filePath, fileContent}) => { + return fileContent.replace(/{{PREPROCESSOR:OPENLINEAGE_VERSION}}/g, getCurrentVersion()); +}; const links = [ - { to: '/getting-started', label: 'Getting Started', position: 'left' }, - { to: '/resources', label: 'Resources', position: 'left' }, - { to: '/ecosystem', label: 'Ecosystem', position: 'left' }, - { to: '/community', label: 'Community', position: 'left' }, - { to: '/blog', label: 'Blog', position: 'left' }, - { to: '/docs', label: 'Docs', position: 'left' }, - { to: '/survey', label: 'Ecosystem Survey 2023', position: 'left' }, + {to: '/getting-started', label: 'Getting Started', position: 'left'}, + {to: '/resources', label: 'Resources', position: 'left'}, + {to: '/ecosystem', label: 'Ecosystem', position: 'left'}, + {to: '/community', label: 'Community', position: 'left'}, + {to: '/blog', label: 'Blog', position: 'left'}, + {to: '/docs', label: 'Docs', position: 'left'}, + {to: '/survey', label: 'Ecosystem Survey 2023', position: 'left'}, ] const linksSocial = [ - { href: 'https://fosstodon.org/@openlineage', label: 'Mastodon', rel: 'me' }, - { href: 'https://twitter.com/OpenLineage', label: 'Twitter' }, - { href: 'https://www.linkedin.com/groups/13927795/', label: 'LinkedIn'}, - { href: 'http://bit.ly/OpenLineageSlack', label: 'Slack' }, - { href: 'https://github.com/OpenLineage/OpenLineage', label: 'GitHub' } + {href: 'https://fosstodon.org/@openlineage', label: 'Mastodon', rel: 'me'}, + {href: 'https://twitter.com/OpenLineage', label: 'Twitter'}, + {href: 'https://www.linkedin.com/groups/13927795/', label: 'LinkedIn'}, + {href: 'http://bit.ly/OpenLineageSlack', label: 'Slack'}, + {href: 'https://github.com/OpenLineage/OpenLineage', label: 'GitHub'} ] /** @type {import('@docusaurus/types').Config} */ const config = { - title: 'OpenLineage', - tagline: 'OpenLineage', - url: 'https://openlineage.io', - baseUrl: '/', - onBrokenLinks: 'throw', - onBrokenMarkdownLinks: 'throw', - favicon: 'img/favicon.ico', - customFields: { - links: links, - linksSocial: linksSocial - }, + title: 'OpenLineage', + tagline: 'OpenLineage', + url: 'https://openlineage.io', + baseUrl: '/', + onBrokenLinks: 'throw', + onBrokenMarkdownLinks: 'throw', + favicon: 'img/favicon.ico', + customFields: { + links: links, + linksSocial: linksSocial + }, - organizationName: 'openlineage', - projectName: 'docs', - i18n: { - defaultLocale: 'en', - locales: ['en'], - }, + organizationName: 'openlineage', + projectName: 'docs', + i18n: { + defaultLocale: 'en', + locales: ['en'], + }, - presets: [ - [ - 'classic', - /** @type {import('@docusaurus/preset-classic').Options} */ - ({ - docs: { - sidebarPath: require.resolve('./sidebars.js'), - exclude: ['**/partials/**'], - editUrl: - 'https://github.com/OpenLineage/OpenLineage/tree/main/website/', - }, - theme: { - customCss: require.resolve('./src/css/custom.css'), - }, - blog: { - blogTitle: 'Blog', - blogDescription: 'Data lineage is the foundation for a new generation of powerful, context-aware data tools and best practices. 
OpenLineage enables consistent collection of lineage metadata, creating a deeper understanding of how data is produced and used.', - showReadingTime: true, - blogSidebarCount: 5, - blogSidebarTitle: 'Recent posts', - feedOptions: { - type: ['json'], - copyright: `Copyright © ${new Date().getFullYear()} The Linux Foundation®. All rights reserved.`, - }, - }, - pages: { - path: 'src/pages', - include: ['**/*.{js,jsx,ts,tsx,md,mdx}'], - exclude: [ - 'home.tsx', // this page served from plugin - '**/_*.{js,jsx,ts,tsx,md,mdx}', - '**/_*.{js,jsx,ts,tsx,md,mdx}', - '**/_*/**', - '**/*.test.{js,jsx,ts,tsx}', - '**/__tests__/**', - ], - mdxPageComponent: '@theme/MDXPage', - }, - gtag: { - trackingID: 'G-QMTWMLMX4M', - anonymizeIP: true, - }, - }), + presets: [ + [ + 'classic', + /** @type {import('@docusaurus/preset-classic').Options} */ + ({ + docs: { + sidebarPath: require.resolve('./sidebars.js'), + exclude: ['**/partials/**'], + editUrl: + 'https://github.com/OpenLineage/OpenLineage/tree/main/website/', + }, + theme: { + customCss: require.resolve('./src/css/custom.css'), + }, + blog: { + blogTitle: 'Blog', + blogDescription: 'Data lineage is the foundation for a new generation of powerful, context-aware data tools and best practices. OpenLineage enables consistent collection of lineage metadata, creating a deeper understanding of how data is produced and used.', + showReadingTime: true, + blogSidebarCount: 5, + blogSidebarTitle: 'Recent posts', + feedOptions: { + type: ['json'], + copyright: `Copyright © ${new Date().getFullYear()} The Linux Foundation®. All rights reserved.`, + }, + }, + pages: { + path: 'src/pages', + include: ['**/*.{js,jsx,ts,tsx,md,mdx}'], + exclude: [ + 'home.tsx', // this page served from plugin + '**/_*.{js,jsx,ts,tsx,md,mdx}', + '**/_*.{js,jsx,ts,tsx,md,mdx}', + '**/_*/**', + '**/*.test.{js,jsx,ts,tsx}', + '**/__tests__/**', + ], + mdxPageComponent: '@theme/MDXPage', + }, + gtag: { + trackingID: 'G-QMTWMLMX4M', + anonymizeIP: true, + }, + }), + ], ], - ], - plugins: [ - function tailwindcssPlugin(ctx, options) { - return { - name: "docusaurus-tailwindcss", - configurePostCss(postcssOptions) { - // Appends TailwindCSS and AutoPrefixer. - postcssOptions.plugins.push(require("tailwindcss")); - postcssOptions.plugins.push(require("autoprefixer")); - return postcssOptions; + plugins: [ + function tailwindcssPlugin(ctx, options) { + return { + name: "docusaurus-tailwindcss", + configurePostCss(postcssOptions) { + // Appends TailwindCSS and AutoPrefixer. 
+ postcssOptions.plugins.push(require("tailwindcss")); + postcssOptions.plugins.push(require("autoprefixer")); + return postcssOptions; + }, + }; }, - }; - }, - [ - "./plugins/home-blog-plugin", - { - id: "blogs", - routeBasePath: "/", - path: "./blogs" - }, + [ + "./plugins/home-blog-plugin", + { + id: "blogs", + routeBasePath: "/", + path: "./blogs" + }, + ], + require.resolve('docusaurus-lunr-search') ], - require.resolve('docusaurus-lunr-search') - ], - themeConfig: + themeConfig: /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ - ({ - navbar: { - logo: { - alt: 'OpenLineage', - src: 'img/ol-logo.svg', + ({ + navbar: { + logo: { + alt: 'OpenLineage', + src: 'img/ol-logo.svg', + }, + items: [ + ...links, + { + href: 'https://github.com/OpenLineage/openlineage', + label: 'GitHub', + position: 'right', + } + ], + }, + prism: { + theme: prism.themes.github, + darkTheme: prism.themes.dracula, + additionalLanguages: ['java'], + }, + colorMode: { + defaultMode: 'light', + disableSwitch: true, + respectPrefersColorScheme: false, + }, + }), + + scripts: [ + { + src: 'https://plausible.io/js/script.js', + defer: true, + 'data-domain': 'openlineage.io', }, - items: [ - ...links, - { - href: 'https://github.com/OpenLineage/openlineage', - label: 'GitHub', - position: 'right', - } - ], - }, - prism: { - theme: prism.themes.github, - darkTheme: prism.themes.dracula, - additionalLanguages: ['java'], - }, - colorMode: { - defaultMode: 'light', - disableSwitch: true, - respectPrefersColorScheme: false, - }, - }), + ], - scripts: [ - { - src: 'https://plausible.io/js/script.js', - defer: true, - 'data-domain': 'openlineage.io', - }, - ], + markdown: { + preprocessor: openLineageVersionProvider + } }; module.exports = config; From 8906ff3dd8036d731ccce0591b73f1860a520859 Mon Sep 17 00:00:00 2001 From: Jakub Dardzinski Date: Wed, 6 Nov 2024 12:30:12 +0100 Subject: [PATCH 55/89] Make `from_dict` backwards compatible in Python client. (#3226) Signed-off-by: Jakub Dardzinski --- client/python/openlineage/client/client.py | 12 +++++-- client/python/tests/test_client.py | 37 ++++++++++------------ 2 files changed, 26 insertions(+), 23 deletions(-) diff --git a/client/python/openlineage/client/client.py b/client/python/openlineage/client/client.py index 2fcadbba42..b6efe62192 100644 --- a/client/python/openlineage/client/client.py +++ b/client/python/openlineage/client/client.py @@ -82,7 +82,7 @@ def __init__( # noqa: PLR0913 transport: Transport | None = None, factory: TransportFactory | None = None, *, - config: dict[str, str] | None = None, + config: dict[str, Any] | None = None, ) -> None: # Set parent's logging level if environment variable is present custom_logging_level = os.getenv("OPENLINEAGE_CLIENT_LOGGING", None) @@ -126,7 +126,15 @@ def from_environment(cls: type[_T]) -> _T: @classmethod def from_dict(cls: type[_T], config: dict[str, str]) -> _T: - return cls(config=config) + warnings.warn( + message=( + "Using `from_dict` to set transport is deprecated. " + "Use `config` parameter to fully configure OpenLineageClient." 
+ ), + category=DeprecationWarning, + stacklevel=2, + ) + return cls(transport=get_default_factory().create(config=config), config={"transport": config}) def filter_event( self, diff --git a/client/python/tests/test_client.py b/client/python/tests/test_client.py index bbb76b7521..d3188a6759 100644 --- a/client/python/tests/test_client.py +++ b/client/python/tests/test_client.py @@ -402,8 +402,8 @@ def test_config(mocker: MockerFixture, root: Path) -> None: def test_openlineage_client_from_dict() -> None: - config_dict = {"transport": {"type": "http", "url": "http://localhost:5050"}} - client = OpenLineageClient.from_dict(config_dict) + transport_dict = {"type": "http", "url": "http://localhost:5050"} + client = OpenLineageClient.from_dict(transport_dict) assert client.transport.url == "http://localhost:5050" @@ -680,7 +680,7 @@ def test_config_property_loads_user_defined_config() -> None: ) def test_client_from_empty_dict_with_dynamic_env_vars() -> None: client = OpenLineageClient.from_dict({}) - assert client.transport.url == "http://localhost:5050" + assert client.transport.kind == ConsoleTransport.kind @patch.dict( @@ -691,7 +691,7 @@ def test_client_from_empty_dict_with_dynamic_env_vars() -> None: ) def test_client_from_empty_dict_with_url_env_var() -> None: client = OpenLineageClient.from_dict({}) - assert client.transport.url == "http://example.com" + assert client.transport.kind == ConsoleTransport.kind @patch.dict( @@ -701,15 +701,14 @@ def test_client_from_empty_dict_with_url_env_var() -> None: "OPENLINEAGE__TRANSPORT__URL": "http://localhost:5050", }, ) -def test_client_from_facets_config_in_dict_and_env_vars() -> None: - user_defined_config = { +def test_client_raises_from_wrong_dict() -> None: + config_without_transport = { "facets": { "environment_variables": ["VAR1", "VAR2"], } } - client = OpenLineageClient.from_dict(user_defined_config) - assert client.config.facets.environment_variables == ["VAR1", "VAR2"] - assert client.transport.url == "http://localhost:5050" + with pytest.raises(KeyError): + OpenLineageClient.from_dict(config_without_transport) @patch.dict( @@ -719,13 +718,11 @@ def test_client_from_facets_config_in_dict_and_env_vars() -> None: }, ) def test_client_from_facets_config_in_env_vars_and_transport_in_config() -> None: - user_defined_config = { - "transport": { - "type": "http", - "url": "http://localhost:5050", - } + transport_config = { + "type": "http", + "url": "http://localhost:5050", } - client = OpenLineageClient.from_dict(user_defined_config) + client = OpenLineageClient.from_dict(transport_config) assert client.config.facets.environment_variables == ["VAR1", "VAR2"] assert client.transport.url == "http://localhost:5050" @@ -739,11 +736,9 @@ def test_client_from_facets_config_in_env_vars_and_transport_in_config() -> None @patch("openlineage.client.client.OpenLineageClient._find_yaml_config_path") @patch("openlineage.client.client.OpenLineageClient._get_config_file_content") def test_config_merge_precedence(mock_get_config_content, mock_find_yaml) -> None: - user_defined_config = { - "transport": { - "type": "http", - "url": "http://localhost:5050", - } + transport_config = { + "type": "http", + "url": "http://localhost:5050", } mock_find_yaml.return_value = "config.yml" mock_get_config_content.return_value = { @@ -752,7 +747,7 @@ def test_config_merge_precedence(mock_get_config_content, mock_find_yaml) -> Non "auth": {"api_key": "another_token"}, } } - client = OpenLineageClient.from_dict(user_defined_config) + client = 
OpenLineageClient.from_dict(transport_config) config = client.config assert config.transport["type"] == "http" assert config.transport["url"] == "http://localhost:5050" From df1128546bb1fc35031192ecf912e6201dce10d4 Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Wed, 6 Nov 2024 12:30:44 +0100 Subject: [PATCH 56/89] Prepare for release 1.24.1 Signed-off-by: Maciej Obuchowski --- client/java/gradle.properties | 2 +- client/python/openlineage/client/constants.py | 2 +- client/python/pyproject.toml | 2 +- client/python/setup.cfg | 2 +- integration/airflow/openlineage/airflow/version.py | 2 +- integration/airflow/setup.cfg | 2 +- integration/airflow/setup.py | 2 +- integration/common/openlineage/common/__init__.py | 2 +- integration/common/setup.cfg | 2 +- integration/common/setup.py | 2 +- integration/dagster/openlineage/dagster/__init__.py | 2 +- integration/dagster/setup.cfg | 2 +- integration/dagster/setup.py | 2 +- integration/dbt/scripts/dbt-ol | 2 +- integration/dbt/setup.cfg | 2 +- integration/dbt/setup.py | 2 +- integration/flink/README.md | 2 +- integration/flink/examples/stateful/gradle.properties | 2 +- integration/flink/gradle.properties | 2 +- integration/spark-extension-interfaces/gradle.properties | 2 +- integration/spark/gradle.properties | 2 +- integration/sql/.bumpversion.cfg | 2 +- integration/sql/iface-java/Cargo.toml | 2 +- integration/sql/iface-java/gradle.properties | 2 +- integration/sql/iface-py/Cargo.toml | 2 +- integration/sql/impl/Cargo.toml | 2 +- proxy/backend/gradle.properties | 2 +- 27 files changed, 27 insertions(+), 27 deletions(-) diff --git a/client/java/gradle.properties b/client/java/gradle.properties index 95d2528b0b..9c3b18cb3a 100644 --- a/client/java/gradle.properties +++ b/client/java/gradle.properties @@ -1,3 +1,3 @@ -version=1.25.0-SNAPSHOT +version=1.24.1 org.gradle.caching=true org.gradle.jvmargs=-Xmx4096M \ No newline at end of file diff --git a/client/python/openlineage/client/constants.py b/client/python/openlineage/client/constants.py index f8c3268e05..944a076a18 100644 --- a/client/python/openlineage/client/constants.py +++ b/client/python/openlineage/client/constants.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 from __future__ import annotations -__version__ = "1.25.0" +__version__ = "1.24.1" DEFAULT_TIMEOUT_MS = 5000 DEFAULT_NAMESPACE_NAME = "default" diff --git a/client/python/pyproject.toml b/client/python/pyproject.toml index dd37d8f32a..ce99eefab2 100644 --- a/client/python/pyproject.toml +++ b/client/python/pyproject.toml @@ -6,7 +6,7 @@ requires = [ [project] name = "openlineage-python" -version = "1.25.0" +version = "1.24.1" description = "OpenLineage Python Client" readme = "README.md" keywords = [ diff --git a/client/python/setup.cfg b/client/python/setup.cfg index 475504cb6a..dd407e1fa6 100644 --- a/client/python/setup.cfg +++ b/client/python/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.25.0 +current_version = 1.24.1 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/airflow/openlineage/airflow/version.py b/integration/airflow/openlineage/airflow/version.py index 456bbbc940..6647d21bd8 100644 --- a/integration/airflow/openlineage/airflow/version.py +++ b/integration/airflow/openlineage/airflow/version.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.25.0" +__version__ = "1.24.1" diff --git a/integration/airflow/setup.cfg b/integration/airflow/setup.cfg index 
f89379c072..eb9dd64a37 100644 --- a/integration/airflow/setup.cfg +++ b/integration/airflow/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.25.0 +current_version = 1.24.1 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/airflow/setup.py b/integration/airflow/setup.py index 3ea320f109..ae82821f34 100644 --- a/integration/airflow/setup.py +++ b/integration/airflow/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.25.0" +__version__ = "1.24.1" requirements = [ "attrs>=20.0", diff --git a/integration/common/openlineage/common/__init__.py b/integration/common/openlineage/common/__init__.py index 456bbbc940..6647d21bd8 100644 --- a/integration/common/openlineage/common/__init__.py +++ b/integration/common/openlineage/common/__init__.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.25.0" +__version__ = "1.24.1" diff --git a/integration/common/setup.cfg b/integration/common/setup.cfg index c32d2e69d9..f5aa0d20ec 100644 --- a/integration/common/setup.cfg +++ b/integration/common/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.25.0 +current_version = 1.24.1 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/common/setup.py b/integration/common/setup.py index 97eb781747..694a3ddb32 100644 --- a/integration/common/setup.py +++ b/integration/common/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.25.0" +__version__ = "1.24.1" project_urls = { diff --git a/integration/dagster/openlineage/dagster/__init__.py b/integration/dagster/openlineage/dagster/__init__.py index 456bbbc940..6647d21bd8 100644 --- a/integration/dagster/openlineage/dagster/__init__.py +++ b/integration/dagster/openlineage/dagster/__init__.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.25.0" +__version__ = "1.24.1" diff --git a/integration/dagster/setup.cfg b/integration/dagster/setup.cfg index 73aef8a906..10a88c28d0 100644 --- a/integration/dagster/setup.cfg +++ b/integration/dagster/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.25.0 +current_version = 1.24.1 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/dagster/setup.py b/integration/dagster/setup.py index 31928fe033..aedbbac2ce 100644 --- a/integration/dagster/setup.py +++ b/integration/dagster/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.25.0" +__version__ = "1.24.1" DAGSTER_VERSION = "1.0.0" diff --git a/integration/dbt/scripts/dbt-ol b/integration/dbt/scripts/dbt-ol index 6b5bb9e0d4..4e0fecb1b9 100755 --- a/integration/dbt/scripts/dbt-ol +++ b/integration/dbt/scripts/dbt-ol @@ -23,7 +23,7 @@ from openlineage.common.provider.dbt import ( from openlineage.common.utils import parse_multiple_args, parse_single_arg from tqdm import tqdm -__version__ = "1.25.0" +__version__ = "1.24.1" PRODUCER = f"https://github.com/OpenLineage/OpenLineage/tree/{__version__}/integration/dbt" diff --git a/integration/dbt/setup.cfg b/integration/dbt/setup.cfg index bec3856aec..163c95497c 100644 --- a/integration/dbt/setup.cfg +++ b/integration/dbt/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.25.0 +current_version = 1.24.1 commit = False tag = False parse = 
(?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/dbt/setup.py b/integration/dbt/setup.py index 26ee4da301..57fd1424f8 100644 --- a/integration/dbt/setup.py +++ b/integration/dbt/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.25.0" +__version__ = "1.24.1" requirements = [ "tqdm>=4.62.0", diff --git a/integration/flink/README.md b/integration/flink/README.md index 7f1ce54eff..e00ab474dd 100644 --- a/integration/flink/README.md +++ b/integration/flink/README.md @@ -14,7 +14,7 @@ Maven: io.openlineage openlineage-flink - 1.24.0 + 1.24.1 ``` diff --git a/integration/flink/examples/stateful/gradle.properties b/integration/flink/examples/stateful/gradle.properties index 9e063ce8f5..73ce755b3e 100644 --- a/integration/flink/examples/stateful/gradle.properties +++ b/integration/flink/examples/stateful/gradle.properties @@ -1,3 +1,3 @@ -version=1.25.0-SNAPSHOT +version=1.24.1 flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G \ No newline at end of file diff --git a/integration/flink/gradle.properties b/integration/flink/gradle.properties index c16abd53be..f2384841f0 100644 --- a/integration/flink/gradle.properties +++ b/integration/flink/gradle.properties @@ -1,5 +1,5 @@ jdk8.build=true -version=1.25.0-SNAPSHOT +version=1.24.1 flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G diff --git a/integration/spark-extension-interfaces/gradle.properties b/integration/spark-extension-interfaces/gradle.properties index b986211c1f..3d8bc86b10 100644 --- a/integration/spark-extension-interfaces/gradle.properties +++ b/integration/spark-extension-interfaces/gradle.properties @@ -1 +1 @@ -version=1.25.0-SNAPSHOT \ No newline at end of file +version=1.24.1 \ No newline at end of file diff --git a/integration/spark/gradle.properties b/integration/spark/gradle.properties index 6a310fb67b..b22d74e60f 100644 --- a/integration/spark/gradle.properties +++ b/integration/spark/gradle.properties @@ -1,4 +1,4 @@ -version=1.25.0-SNAPSHOT +version=1.24.1 org.gradle.jvmargs=-Xmx4G spark.version=3.3.4 diff --git a/integration/sql/.bumpversion.cfg b/integration/sql/.bumpversion.cfg index 87a8f7cd08..d6ec860f25 100644 --- a/integration/sql/.bumpversion.cfg +++ b/integration/sql/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.25.0 +current_version = 1.24.1 [bumpversion:file:iface-py/Cargo.toml] search = version = "{current_version}" diff --git a/integration/sql/iface-java/Cargo.toml b/integration/sql/iface-java/Cargo.toml index a8730af0fb..6eca812b91 100644 --- a/integration/sql/iface-java/Cargo.toml +++ b/integration/sql/iface-java/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql_java" description = "Java interface for the Rust OpenLineage lineage extraction library" -version = "1.25.0" +version = "1.24.1" edition = "2021" authors = ["Piotr Wojtczak ", "Maciej Obuchowski "] keywords = ["sql", "lineage", "openlineage", "java", "jni"] diff --git a/integration/sql/iface-java/gradle.properties b/integration/sql/iface-java/gradle.properties index 808b8dbffb..f4a882e2da 100644 --- a/integration/sql/iface-java/gradle.properties +++ b/integration/sql/iface-java/gradle.properties @@ -1 +1 @@ -version=1.25.0-SNAPSHOT +version=1.24.1 diff --git a/integration/sql/iface-py/Cargo.toml b/integration/sql/iface-py/Cargo.toml index 6566782c6c..3f967fd92a 100644 --- a/integration/sql/iface-py/Cargo.toml +++ b/integration/sql/iface-py/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql_python" description = "Python interface for the Rust 
OpenLineage lineage extraction library" -version = "1.25.0" +version = "1.24.1" edition = "2021" authors = ["Maciej Obuchowski "] keywords = ["sql", "lineage", "openlineage", "python"] diff --git a/integration/sql/impl/Cargo.toml b/integration/sql/impl/Cargo.toml index d60617aabe..0c926b9ad8 100644 --- a/integration/sql/impl/Cargo.toml +++ b/integration/sql/impl/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql" description = "Library extracting source and destination tables from sql statements" -version = "1.25.0" +version = "1.24.1" edition = "2021" authors = ["Maciej Obuchowski ", "Piotr Wojtczak Date: Wed, 6 Nov 2024 12:30:48 +0100 Subject: [PATCH 57/89] Prepare next development version 1.25.0-SNAPSHOT Signed-off-by: Maciej Obuchowski --- client/java/gradle.properties | 2 +- client/python/openlineage/client/constants.py | 2 +- client/python/pyproject.toml | 2 +- client/python/setup.cfg | 2 +- integration/airflow/openlineage/airflow/version.py | 2 +- integration/airflow/setup.cfg | 2 +- integration/airflow/setup.py | 2 +- integration/common/openlineage/common/__init__.py | 2 +- integration/common/setup.cfg | 2 +- integration/common/setup.py | 2 +- integration/dagster/openlineage/dagster/__init__.py | 2 +- integration/dagster/setup.cfg | 2 +- integration/dagster/setup.py | 2 +- integration/dbt/scripts/dbt-ol | 2 +- integration/dbt/setup.cfg | 2 +- integration/dbt/setup.py | 2 +- integration/flink/examples/stateful/gradle.properties | 2 +- integration/flink/gradle.properties | 2 +- integration/spark-extension-interfaces/gradle.properties | 2 +- integration/spark/gradle.properties | 2 +- integration/sql/.bumpversion.cfg | 2 +- integration/sql/iface-java/Cargo.toml | 2 +- integration/sql/iface-java/gradle.properties | 2 +- integration/sql/iface-py/Cargo.toml | 2 +- integration/sql/impl/Cargo.toml | 2 +- proxy/backend/gradle.properties | 2 +- 26 files changed, 26 insertions(+), 26 deletions(-) diff --git a/client/java/gradle.properties b/client/java/gradle.properties index 9c3b18cb3a..95d2528b0b 100644 --- a/client/java/gradle.properties +++ b/client/java/gradle.properties @@ -1,3 +1,3 @@ -version=1.24.1 +version=1.25.0-SNAPSHOT org.gradle.caching=true org.gradle.jvmargs=-Xmx4096M \ No newline at end of file diff --git a/client/python/openlineage/client/constants.py b/client/python/openlineage/client/constants.py index 944a076a18..f8c3268e05 100644 --- a/client/python/openlineage/client/constants.py +++ b/client/python/openlineage/client/constants.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 from __future__ import annotations -__version__ = "1.24.1" +__version__ = "1.25.0" DEFAULT_TIMEOUT_MS = 5000 DEFAULT_NAMESPACE_NAME = "default" diff --git a/client/python/pyproject.toml b/client/python/pyproject.toml index ce99eefab2..dd37d8f32a 100644 --- a/client/python/pyproject.toml +++ b/client/python/pyproject.toml @@ -6,7 +6,7 @@ requires = [ [project] name = "openlineage-python" -version = "1.24.1" +version = "1.25.0" description = "OpenLineage Python Client" readme = "README.md" keywords = [ diff --git a/client/python/setup.cfg b/client/python/setup.cfg index dd407e1fa6..475504cb6a 100644 --- a/client/python/setup.cfg +++ b/client/python/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.1 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/airflow/openlineage/airflow/version.py b/integration/airflow/openlineage/airflow/version.py index 6647d21bd8..456bbbc940 100644 --- 
a/integration/airflow/openlineage/airflow/version.py +++ b/integration/airflow/openlineage/airflow/version.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.24.1" +__version__ = "1.25.0" diff --git a/integration/airflow/setup.cfg b/integration/airflow/setup.cfg index eb9dd64a37..f89379c072 100644 --- a/integration/airflow/setup.cfg +++ b/integration/airflow/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.1 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/airflow/setup.py b/integration/airflow/setup.py index ae82821f34..3ea320f109 100644 --- a/integration/airflow/setup.py +++ b/integration/airflow/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.24.1" +__version__ = "1.25.0" requirements = [ "attrs>=20.0", diff --git a/integration/common/openlineage/common/__init__.py b/integration/common/openlineage/common/__init__.py index 6647d21bd8..456bbbc940 100644 --- a/integration/common/openlineage/common/__init__.py +++ b/integration/common/openlineage/common/__init__.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.24.1" +__version__ = "1.25.0" diff --git a/integration/common/setup.cfg b/integration/common/setup.cfg index f5aa0d20ec..c32d2e69d9 100644 --- a/integration/common/setup.cfg +++ b/integration/common/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.1 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/common/setup.py b/integration/common/setup.py index 694a3ddb32..97eb781747 100644 --- a/integration/common/setup.py +++ b/integration/common/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.24.1" +__version__ = "1.25.0" project_urls = { diff --git a/integration/dagster/openlineage/dagster/__init__.py b/integration/dagster/openlineage/dagster/__init__.py index 6647d21bd8..456bbbc940 100644 --- a/integration/dagster/openlineage/dagster/__init__.py +++ b/integration/dagster/openlineage/dagster/__init__.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.24.1" +__version__ = "1.25.0" diff --git a/integration/dagster/setup.cfg b/integration/dagster/setup.cfg index 10a88c28d0..73aef8a906 100644 --- a/integration/dagster/setup.cfg +++ b/integration/dagster/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.1 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/dagster/setup.py b/integration/dagster/setup.py index aedbbac2ce..31928fe033 100644 --- a/integration/dagster/setup.py +++ b/integration/dagster/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.24.1" +__version__ = "1.25.0" DAGSTER_VERSION = "1.0.0" diff --git a/integration/dbt/scripts/dbt-ol b/integration/dbt/scripts/dbt-ol index 4e0fecb1b9..6b5bb9e0d4 100755 --- a/integration/dbt/scripts/dbt-ol +++ b/integration/dbt/scripts/dbt-ol @@ -23,7 +23,7 @@ from openlineage.common.provider.dbt import ( from openlineage.common.utils import parse_multiple_args, parse_single_arg from tqdm import tqdm -__version__ = "1.24.1" +__version__ = "1.25.0" PRODUCER = 
f"https://github.com/OpenLineage/OpenLineage/tree/{__version__}/integration/dbt" diff --git a/integration/dbt/setup.cfg b/integration/dbt/setup.cfg index 163c95497c..bec3856aec 100644 --- a/integration/dbt/setup.cfg +++ b/integration/dbt/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.1 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/dbt/setup.py b/integration/dbt/setup.py index 57fd1424f8..26ee4da301 100644 --- a/integration/dbt/setup.py +++ b/integration/dbt/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.24.1" +__version__ = "1.25.0" requirements = [ "tqdm>=4.62.0", diff --git a/integration/flink/examples/stateful/gradle.properties b/integration/flink/examples/stateful/gradle.properties index 73ce755b3e..9e063ce8f5 100644 --- a/integration/flink/examples/stateful/gradle.properties +++ b/integration/flink/examples/stateful/gradle.properties @@ -1,3 +1,3 @@ -version=1.24.1 +version=1.25.0-SNAPSHOT flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G \ No newline at end of file diff --git a/integration/flink/gradle.properties b/integration/flink/gradle.properties index f2384841f0..c16abd53be 100644 --- a/integration/flink/gradle.properties +++ b/integration/flink/gradle.properties @@ -1,5 +1,5 @@ jdk8.build=true -version=1.24.1 +version=1.25.0-SNAPSHOT flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G diff --git a/integration/spark-extension-interfaces/gradle.properties b/integration/spark-extension-interfaces/gradle.properties index 3d8bc86b10..b986211c1f 100644 --- a/integration/spark-extension-interfaces/gradle.properties +++ b/integration/spark-extension-interfaces/gradle.properties @@ -1 +1 @@ -version=1.24.1 \ No newline at end of file +version=1.25.0-SNAPSHOT \ No newline at end of file diff --git a/integration/spark/gradle.properties b/integration/spark/gradle.properties index b22d74e60f..6a310fb67b 100644 --- a/integration/spark/gradle.properties +++ b/integration/spark/gradle.properties @@ -1,4 +1,4 @@ -version=1.24.1 +version=1.25.0-SNAPSHOT org.gradle.jvmargs=-Xmx4G spark.version=3.3.4 diff --git a/integration/sql/.bumpversion.cfg b/integration/sql/.bumpversion.cfg index d6ec860f25..87a8f7cd08 100644 --- a/integration/sql/.bumpversion.cfg +++ b/integration/sql/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.1 +current_version = 1.25.0 [bumpversion:file:iface-py/Cargo.toml] search = version = "{current_version}" diff --git a/integration/sql/iface-java/Cargo.toml b/integration/sql/iface-java/Cargo.toml index 6eca812b91..a8730af0fb 100644 --- a/integration/sql/iface-java/Cargo.toml +++ b/integration/sql/iface-java/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql_java" description = "Java interface for the Rust OpenLineage lineage extraction library" -version = "1.24.1" +version = "1.25.0" edition = "2021" authors = ["Piotr Wojtczak ", "Maciej Obuchowski "] keywords = ["sql", "lineage", "openlineage", "java", "jni"] diff --git a/integration/sql/iface-java/gradle.properties b/integration/sql/iface-java/gradle.properties index f4a882e2da..808b8dbffb 100644 --- a/integration/sql/iface-java/gradle.properties +++ b/integration/sql/iface-java/gradle.properties @@ -1 +1 @@ -version=1.24.1 +version=1.25.0-SNAPSHOT diff --git a/integration/sql/iface-py/Cargo.toml b/integration/sql/iface-py/Cargo.toml index 3f967fd92a..6566782c6c 100644 --- a/integration/sql/iface-py/Cargo.toml +++ b/integration/sql/iface-py/Cargo.toml 
@@ -1,7 +1,7 @@ [package] name = "openlineage_sql_python" description = "Python interface for the Rust OpenLineage lineage extraction library" -version = "1.24.1" +version = "1.25.0" edition = "2021" authors = ["Maciej Obuchowski "] keywords = ["sql", "lineage", "openlineage", "python"] diff --git a/integration/sql/impl/Cargo.toml b/integration/sql/impl/Cargo.toml index 0c926b9ad8..d60617aabe 100644 --- a/integration/sql/impl/Cargo.toml +++ b/integration/sql/impl/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql" description = "Library extracting source and destination tables from sql statements" -version = "1.24.1" +version = "1.25.0" edition = "2021" authors = ["Maciej Obuchowski ", "Piotr Wojtczak Date: Wed, 6 Nov 2024 13:13:26 +0100 Subject: [PATCH 58/89] passthrough credentials to Flink build job (#3227) Signed-off-by: Maciej Obuchowski --- .circleci/continue_config.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.circleci/continue_config.yml b/.circleci/continue_config.yml index 86281da893..a61e6f22de 100644 --- a/.circleci/continue_config.yml +++ b/.circleci/continue_config.yml @@ -982,7 +982,12 @@ jobs: at: ~/ - set_java_version - run: chmod -R 777 data/iceberg/db - - run: ./gradlew --no-daemon --console=plain integrationTest --i -Pflink.version=<< parameters.flink-version >> + - run: | + # Get, then decode the GPG private key used to sign *.jar + export ORG_GRADLE_PROJECT_signingKey=$(echo $GPG_SIGNING_KEY | base64 -d) + export RELEASE_PASSWORD=$(echo $OSSRH_TOKEN_PASSWORD) + export RELEASE_USERNAME=$(echo $OSSRH_TOKEN_USERNAME) + ./gradlew --no-daemon --console=plain integrationTest --i -Pflink.version=<< parameters.flink-version >> - run: when: on_fail command: cat app/build/test-results/integrationTest/TEST-*.xml From e51beba00c5ba76f7e8da6992298184f64f4b541 Mon Sep 17 00:00:00 2001 From: Maciej Obuchowski Date: Wed, 6 Nov 2024 13:27:45 +0100 Subject: [PATCH 59/89] Prepare for release 1.24.2 Signed-off-by: Maciej Obuchowski --- client/java/gradle.properties | 2 +- client/python/openlineage/client/constants.py | 2 +- client/python/pyproject.toml | 2 +- client/python/setup.cfg | 2 +- integration/airflow/openlineage/airflow/version.py | 2 +- integration/airflow/setup.cfg | 2 +- integration/airflow/setup.py | 2 +- integration/common/openlineage/common/__init__.py | 2 +- integration/common/setup.cfg | 2 +- integration/common/setup.py | 2 +- integration/dagster/openlineage/dagster/__init__.py | 2 +- integration/dagster/setup.cfg | 2 +- integration/dagster/setup.py | 2 +- integration/dbt/scripts/dbt-ol | 2 +- integration/dbt/setup.cfg | 2 +- integration/dbt/setup.py | 2 +- integration/flink/README.md | 2 +- integration/flink/examples/stateful/gradle.properties | 2 +- integration/flink/gradle.properties | 2 +- integration/spark-extension-interfaces/gradle.properties | 2 +- integration/spark/gradle.properties | 2 +- integration/sql/.bumpversion.cfg | 2 +- integration/sql/iface-java/Cargo.toml | 2 +- integration/sql/iface-java/gradle.properties | 2 +- integration/sql/iface-py/Cargo.toml | 2 +- integration/sql/impl/Cargo.toml | 2 +- proxy/backend/gradle.properties | 2 +- 27 files changed, 27 insertions(+), 27 deletions(-) diff --git a/client/java/gradle.properties b/client/java/gradle.properties index 95d2528b0b..080361b387 100644 --- a/client/java/gradle.properties +++ b/client/java/gradle.properties @@ -1,3 +1,3 @@ -version=1.25.0-SNAPSHOT +version=1.24.2 org.gradle.caching=true org.gradle.jvmargs=-Xmx4096M \ No newline at end of file diff --git 
a/client/python/openlineage/client/constants.py b/client/python/openlineage/client/constants.py index f8c3268e05..7aec6c999a 100644 --- a/client/python/openlineage/client/constants.py +++ b/client/python/openlineage/client/constants.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 from __future__ import annotations -__version__ = "1.25.0" +__version__ = "1.24.2" DEFAULT_TIMEOUT_MS = 5000 DEFAULT_NAMESPACE_NAME = "default" diff --git a/client/python/pyproject.toml b/client/python/pyproject.toml index dd37d8f32a..2d3de81495 100644 --- a/client/python/pyproject.toml +++ b/client/python/pyproject.toml @@ -6,7 +6,7 @@ requires = [ [project] name = "openlineage-python" -version = "1.25.0" +version = "1.24.2" description = "OpenLineage Python Client" readme = "README.md" keywords = [ diff --git a/client/python/setup.cfg b/client/python/setup.cfg index 475504cb6a..f83e44c574 100644 --- a/client/python/setup.cfg +++ b/client/python/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.25.0 +current_version = 1.24.2 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/airflow/openlineage/airflow/version.py b/integration/airflow/openlineage/airflow/version.py index 456bbbc940..36e919405c 100644 --- a/integration/airflow/openlineage/airflow/version.py +++ b/integration/airflow/openlineage/airflow/version.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.25.0" +__version__ = "1.24.2" diff --git a/integration/airflow/setup.cfg b/integration/airflow/setup.cfg index f89379c072..15249e4292 100644 --- a/integration/airflow/setup.cfg +++ b/integration/airflow/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.25.0 +current_version = 1.24.2 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/airflow/setup.py b/integration/airflow/setup.py index 3ea320f109..122e08a961 100644 --- a/integration/airflow/setup.py +++ b/integration/airflow/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.25.0" +__version__ = "1.24.2" requirements = [ "attrs>=20.0", diff --git a/integration/common/openlineage/common/__init__.py b/integration/common/openlineage/common/__init__.py index 456bbbc940..36e919405c 100644 --- a/integration/common/openlineage/common/__init__.py +++ b/integration/common/openlineage/common/__init__.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.25.0" +__version__ = "1.24.2" diff --git a/integration/common/setup.cfg b/integration/common/setup.cfg index c32d2e69d9..a64a3c84bd 100644 --- a/integration/common/setup.cfg +++ b/integration/common/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.25.0 +current_version = 1.24.2 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/common/setup.py b/integration/common/setup.py index 97eb781747..e0d659df9e 100644 --- a/integration/common/setup.py +++ b/integration/common/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.25.0" +__version__ = "1.24.2" project_urls = { diff --git a/integration/dagster/openlineage/dagster/__init__.py b/integration/dagster/openlineage/dagster/__init__.py index 456bbbc940..36e919405c 100644 --- a/integration/dagster/openlineage/dagster/__init__.py +++ 
b/integration/dagster/openlineage/dagster/__init__.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.25.0" +__version__ = "1.24.2" diff --git a/integration/dagster/setup.cfg b/integration/dagster/setup.cfg index 73aef8a906..dc002cb467 100644 --- a/integration/dagster/setup.cfg +++ b/integration/dagster/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.25.0 +current_version = 1.24.2 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/dagster/setup.py b/integration/dagster/setup.py index 31928fe033..29e90d7a8b 100644 --- a/integration/dagster/setup.py +++ b/integration/dagster/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.25.0" +__version__ = "1.24.2" DAGSTER_VERSION = "1.0.0" diff --git a/integration/dbt/scripts/dbt-ol b/integration/dbt/scripts/dbt-ol index 6b5bb9e0d4..acf17fcf56 100755 --- a/integration/dbt/scripts/dbt-ol +++ b/integration/dbt/scripts/dbt-ol @@ -23,7 +23,7 @@ from openlineage.common.provider.dbt import ( from openlineage.common.utils import parse_multiple_args, parse_single_arg from tqdm import tqdm -__version__ = "1.25.0" +__version__ = "1.24.2" PRODUCER = f"https://github.com/OpenLineage/OpenLineage/tree/{__version__}/integration/dbt" diff --git a/integration/dbt/setup.cfg b/integration/dbt/setup.cfg index bec3856aec..04c14babfc 100644 --- a/integration/dbt/setup.cfg +++ b/integration/dbt/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.25.0 +current_version = 1.24.2 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/dbt/setup.py b/integration/dbt/setup.py index 26ee4da301..42e491d93e 100644 --- a/integration/dbt/setup.py +++ b/integration/dbt/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.25.0" +__version__ = "1.24.2" requirements = [ "tqdm>=4.62.0", diff --git a/integration/flink/README.md b/integration/flink/README.md index e00ab474dd..3d3efd0155 100644 --- a/integration/flink/README.md +++ b/integration/flink/README.md @@ -14,7 +14,7 @@ Maven: io.openlineage openlineage-flink - 1.24.1 + 1.24.2 ``` diff --git a/integration/flink/examples/stateful/gradle.properties b/integration/flink/examples/stateful/gradle.properties index 9e063ce8f5..c82e9cb78d 100644 --- a/integration/flink/examples/stateful/gradle.properties +++ b/integration/flink/examples/stateful/gradle.properties @@ -1,3 +1,3 @@ -version=1.25.0-SNAPSHOT +version=1.24.2 flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G \ No newline at end of file diff --git a/integration/flink/gradle.properties b/integration/flink/gradle.properties index c16abd53be..432537a882 100644 --- a/integration/flink/gradle.properties +++ b/integration/flink/gradle.properties @@ -1,5 +1,5 @@ jdk8.build=true -version=1.25.0-SNAPSHOT +version=1.24.2 flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G diff --git a/integration/spark-extension-interfaces/gradle.properties b/integration/spark-extension-interfaces/gradle.properties index b986211c1f..a44337cbd0 100644 --- a/integration/spark-extension-interfaces/gradle.properties +++ b/integration/spark-extension-interfaces/gradle.properties @@ -1 +1 @@ -version=1.25.0-SNAPSHOT \ No newline at end of file +version=1.24.2 \ No newline at end of file diff --git a/integration/spark/gradle.properties b/integration/spark/gradle.properties index 6a310fb67b..c0d20fcf6c 100644 --- 
a/integration/spark/gradle.properties +++ b/integration/spark/gradle.properties @@ -1,4 +1,4 @@ -version=1.25.0-SNAPSHOT +version=1.24.2 org.gradle.jvmargs=-Xmx4G spark.version=3.3.4 diff --git a/integration/sql/.bumpversion.cfg b/integration/sql/.bumpversion.cfg index 87a8f7cd08..9dc1ab4f17 100644 --- a/integration/sql/.bumpversion.cfg +++ b/integration/sql/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.25.0 +current_version = 1.24.2 [bumpversion:file:iface-py/Cargo.toml] search = version = "{current_version}" diff --git a/integration/sql/iface-java/Cargo.toml b/integration/sql/iface-java/Cargo.toml index a8730af0fb..a5d7f1e8ae 100644 --- a/integration/sql/iface-java/Cargo.toml +++ b/integration/sql/iface-java/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql_java" description = "Java interface for the Rust OpenLineage lineage extraction library" -version = "1.25.0" +version = "1.24.2" edition = "2021" authors = ["Piotr Wojtczak ", "Maciej Obuchowski "] keywords = ["sql", "lineage", "openlineage", "java", "jni"] diff --git a/integration/sql/iface-java/gradle.properties b/integration/sql/iface-java/gradle.properties index 808b8dbffb..47573280fd 100644 --- a/integration/sql/iface-java/gradle.properties +++ b/integration/sql/iface-java/gradle.properties @@ -1 +1 @@ -version=1.25.0-SNAPSHOT +version=1.24.2 diff --git a/integration/sql/iface-py/Cargo.toml b/integration/sql/iface-py/Cargo.toml index 6566782c6c..418680fc2c 100644 --- a/integration/sql/iface-py/Cargo.toml +++ b/integration/sql/iface-py/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql_python" description = "Python interface for the Rust OpenLineage lineage extraction library" -version = "1.25.0" +version = "1.24.2" edition = "2021" authors = ["Maciej Obuchowski "] keywords = ["sql", "lineage", "openlineage", "python"] diff --git a/integration/sql/impl/Cargo.toml b/integration/sql/impl/Cargo.toml index d60617aabe..c691247fc2 100644 --- a/integration/sql/impl/Cargo.toml +++ b/integration/sql/impl/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql" description = "Library extracting source and destination tables from sql statements" -version = "1.25.0" +version = "1.24.2" edition = "2021" authors = ["Maciej Obuchowski ", "Piotr Wojtczak Date: Wed, 6 Nov 2024 13:27:51 +0100 Subject: [PATCH 60/89] Prepare next development version 1.25.0-SNAPSHOT Signed-off-by: Maciej Obuchowski --- client/java/gradle.properties | 2 +- client/python/openlineage/client/constants.py | 2 +- client/python/pyproject.toml | 2 +- client/python/setup.cfg | 2 +- integration/airflow/openlineage/airflow/version.py | 2 +- integration/airflow/setup.cfg | 2 +- integration/airflow/setup.py | 2 +- integration/common/openlineage/common/__init__.py | 2 +- integration/common/setup.cfg | 2 +- integration/common/setup.py | 2 +- integration/dagster/openlineage/dagster/__init__.py | 2 +- integration/dagster/setup.cfg | 2 +- integration/dagster/setup.py | 2 +- integration/dbt/scripts/dbt-ol | 2 +- integration/dbt/setup.cfg | 2 +- integration/dbt/setup.py | 2 +- integration/flink/examples/stateful/gradle.properties | 2 +- integration/flink/gradle.properties | 2 +- integration/spark-extension-interfaces/gradle.properties | 2 +- integration/spark/gradle.properties | 2 +- integration/sql/.bumpversion.cfg | 2 +- integration/sql/iface-java/Cargo.toml | 2 +- integration/sql/iface-java/gradle.properties | 2 +- integration/sql/iface-py/Cargo.toml | 2 +- integration/sql/impl/Cargo.toml | 2 +- proxy/backend/gradle.properties | 2 +- 26 
files changed, 26 insertions(+), 26 deletions(-) diff --git a/client/java/gradle.properties b/client/java/gradle.properties index 080361b387..95d2528b0b 100644 --- a/client/java/gradle.properties +++ b/client/java/gradle.properties @@ -1,3 +1,3 @@ -version=1.24.2 +version=1.25.0-SNAPSHOT org.gradle.caching=true org.gradle.jvmargs=-Xmx4096M \ No newline at end of file diff --git a/client/python/openlineage/client/constants.py b/client/python/openlineage/client/constants.py index 7aec6c999a..f8c3268e05 100644 --- a/client/python/openlineage/client/constants.py +++ b/client/python/openlineage/client/constants.py @@ -2,7 +2,7 @@ # SPDX-License-Identifier: Apache-2.0 from __future__ import annotations -__version__ = "1.24.2" +__version__ = "1.25.0" DEFAULT_TIMEOUT_MS = 5000 DEFAULT_NAMESPACE_NAME = "default" diff --git a/client/python/pyproject.toml b/client/python/pyproject.toml index 2d3de81495..dd37d8f32a 100644 --- a/client/python/pyproject.toml +++ b/client/python/pyproject.toml @@ -6,7 +6,7 @@ requires = [ [project] name = "openlineage-python" -version = "1.24.2" +version = "1.25.0" description = "OpenLineage Python Client" readme = "README.md" keywords = [ diff --git a/client/python/setup.cfg b/client/python/setup.cfg index f83e44c574..475504cb6a 100644 --- a/client/python/setup.cfg +++ b/client/python/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.2 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/airflow/openlineage/airflow/version.py b/integration/airflow/openlineage/airflow/version.py index 36e919405c..456bbbc940 100644 --- a/integration/airflow/openlineage/airflow/version.py +++ b/integration/airflow/openlineage/airflow/version.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.24.2" +__version__ = "1.25.0" diff --git a/integration/airflow/setup.cfg b/integration/airflow/setup.cfg index 15249e4292..f89379c072 100644 --- a/integration/airflow/setup.cfg +++ b/integration/airflow/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.2 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/airflow/setup.py b/integration/airflow/setup.py index 122e08a961..3ea320f109 100644 --- a/integration/airflow/setup.py +++ b/integration/airflow/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.24.2" +__version__ = "1.25.0" requirements = [ "attrs>=20.0", diff --git a/integration/common/openlineage/common/__init__.py b/integration/common/openlineage/common/__init__.py index 36e919405c..456bbbc940 100644 --- a/integration/common/openlineage/common/__init__.py +++ b/integration/common/openlineage/common/__init__.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.24.2" +__version__ = "1.25.0" diff --git a/integration/common/setup.cfg b/integration/common/setup.cfg index a64a3c84bd..c32d2e69d9 100644 --- a/integration/common/setup.cfg +++ b/integration/common/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.2 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/common/setup.py b/integration/common/setup.py index e0d659df9e..97eb781747 100644 --- a/integration/common/setup.py +++ b/integration/common/setup.py @@ -10,7 +10,7 @@ with 
open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.24.2" +__version__ = "1.25.0" project_urls = { diff --git a/integration/dagster/openlineage/dagster/__init__.py b/integration/dagster/openlineage/dagster/__init__.py index 36e919405c..456bbbc940 100644 --- a/integration/dagster/openlineage/dagster/__init__.py +++ b/integration/dagster/openlineage/dagster/__init__.py @@ -1,4 +1,4 @@ # Copyright 2018-2024 contributors to the OpenLineage project # SPDX-License-Identifier: Apache-2.0 -__version__ = "1.24.2" +__version__ = "1.25.0" diff --git a/integration/dagster/setup.cfg b/integration/dagster/setup.cfg index dc002cb467..73aef8a906 100644 --- a/integration/dagster/setup.cfg +++ b/integration/dagster/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.2 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/dagster/setup.py b/integration/dagster/setup.py index 29e90d7a8b..31928fe033 100644 --- a/integration/dagster/setup.py +++ b/integration/dagster/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.24.2" +__version__ = "1.25.0" DAGSTER_VERSION = "1.0.0" diff --git a/integration/dbt/scripts/dbt-ol b/integration/dbt/scripts/dbt-ol index acf17fcf56..6b5bb9e0d4 100755 --- a/integration/dbt/scripts/dbt-ol +++ b/integration/dbt/scripts/dbt-ol @@ -23,7 +23,7 @@ from openlineage.common.provider.dbt import ( from openlineage.common.utils import parse_multiple_args, parse_single_arg from tqdm import tqdm -__version__ = "1.24.2" +__version__ = "1.25.0" PRODUCER = f"https://github.com/OpenLineage/OpenLineage/tree/{__version__}/integration/dbt" diff --git a/integration/dbt/setup.cfg b/integration/dbt/setup.cfg index 04c14babfc..bec3856aec 100644 --- a/integration/dbt/setup.cfg +++ b/integration/dbt/setup.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.2 +current_version = 1.25.0 commit = False tag = False parse = (?P\d+)\.(?P\d+)\.(?P\d+)(?P.*) diff --git a/integration/dbt/setup.py b/integration/dbt/setup.py index 42e491d93e..26ee4da301 100644 --- a/integration/dbt/setup.py +++ b/integration/dbt/setup.py @@ -10,7 +10,7 @@ with open("README.md") as readme_file: readme = readme_file.read() -__version__ = "1.24.2" +__version__ = "1.25.0" requirements = [ "tqdm>=4.62.0", diff --git a/integration/flink/examples/stateful/gradle.properties b/integration/flink/examples/stateful/gradle.properties index c82e9cb78d..9e063ce8f5 100644 --- a/integration/flink/examples/stateful/gradle.properties +++ b/integration/flink/examples/stateful/gradle.properties @@ -1,3 +1,3 @@ -version=1.24.2 +version=1.25.0-SNAPSHOT flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G \ No newline at end of file diff --git a/integration/flink/gradle.properties b/integration/flink/gradle.properties index 432537a882..c16abd53be 100644 --- a/integration/flink/gradle.properties +++ b/integration/flink/gradle.properties @@ -1,5 +1,5 @@ jdk8.build=true -version=1.24.2 +version=1.25.0-SNAPSHOT flink.version=1.19.0 org.gradle.jvmargs=-Xmx1G diff --git a/integration/spark-extension-interfaces/gradle.properties b/integration/spark-extension-interfaces/gradle.properties index a44337cbd0..b986211c1f 100644 --- a/integration/spark-extension-interfaces/gradle.properties +++ b/integration/spark-extension-interfaces/gradle.properties @@ -1 +1 @@ -version=1.24.2 \ No newline at end of file +version=1.25.0-SNAPSHOT \ No newline at end of file diff --git a/integration/spark/gradle.properties 
b/integration/spark/gradle.properties index c0d20fcf6c..6a310fb67b 100644 --- a/integration/spark/gradle.properties +++ b/integration/spark/gradle.properties @@ -1,4 +1,4 @@ -version=1.24.2 +version=1.25.0-SNAPSHOT org.gradle.jvmargs=-Xmx4G spark.version=3.3.4 diff --git a/integration/sql/.bumpversion.cfg b/integration/sql/.bumpversion.cfg index 9dc1ab4f17..87a8f7cd08 100644 --- a/integration/sql/.bumpversion.cfg +++ b/integration/sql/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.24.2 +current_version = 1.25.0 [bumpversion:file:iface-py/Cargo.toml] search = version = "{current_version}" diff --git a/integration/sql/iface-java/Cargo.toml b/integration/sql/iface-java/Cargo.toml index a5d7f1e8ae..a8730af0fb 100644 --- a/integration/sql/iface-java/Cargo.toml +++ b/integration/sql/iface-java/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql_java" description = "Java interface for the Rust OpenLineage lineage extraction library" -version = "1.24.2" +version = "1.25.0" edition = "2021" authors = ["Piotr Wojtczak ", "Maciej Obuchowski "] keywords = ["sql", "lineage", "openlineage", "java", "jni"] diff --git a/integration/sql/iface-java/gradle.properties b/integration/sql/iface-java/gradle.properties index 47573280fd..808b8dbffb 100644 --- a/integration/sql/iface-java/gradle.properties +++ b/integration/sql/iface-java/gradle.properties @@ -1 +1 @@ -version=1.24.2 +version=1.25.0-SNAPSHOT diff --git a/integration/sql/iface-py/Cargo.toml b/integration/sql/iface-py/Cargo.toml index 418680fc2c..6566782c6c 100644 --- a/integration/sql/iface-py/Cargo.toml +++ b/integration/sql/iface-py/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql_python" description = "Python interface for the Rust OpenLineage lineage extraction library" -version = "1.24.2" +version = "1.25.0" edition = "2021" authors = ["Maciej Obuchowski "] keywords = ["sql", "lineage", "openlineage", "python"] diff --git a/integration/sql/impl/Cargo.toml b/integration/sql/impl/Cargo.toml index c691247fc2..d60617aabe 100644 --- a/integration/sql/impl/Cargo.toml +++ b/integration/sql/impl/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "openlineage_sql" description = "Library extracting source and destination tables from sql statements" -version = "1.24.2" +version = "1.25.0" edition = "2021" authors = ["Maciej Obuchowski ", "Piotr Wojtczak Date: Fri, 8 Nov 2024 11:33:50 +0100 Subject: [PATCH 61/89] Make edit link variable to version. (#3229) Edit link now depends on whether the document is current or versioned. 
Signed-off-by: Jakub Dardzinski --- website/docusaurus.config.js | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index 437bc951d9..a0990a51cd 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -68,8 +68,13 @@ const config = { docs: { sidebarPath: require.resolve('./sidebars.js'), exclude: ['**/partials/**'], - editUrl: - 'https://github.com/OpenLineage/OpenLineage/tree/main/website/', + editUrl: ({versionDocsDirPath, docPath, version}) => { + if (version === 'current') { + return `https://github.com/OpenLineage/OpenLineage/tree/main/website/docs/${docPath}`; + } else { + return `https://github.com/OpenLineage/openlineage-site/tree/main/${versionDocsDirPath}/${docPath}`; + } + } }, theme: { customCss: require.resolve('./src/css/custom.css'), From 0399867fc3cf29015c7de1f8923b49ec0b263617 Mon Sep 17 00:00:00 2001 From: Niels Janssen <130155733+NJA010@users.noreply.github.com> Date: Sat, 9 Nov 2024 00:25:48 +0100 Subject: [PATCH 62/89] [Integration][DBT] Fix compatibility with DBT v1.8 (#3228) * check manifest version for >=v12 to fix bug Signed-off-by: NJA010 * typo in test_metadata Signed-off-by: NJA010 * add test for manifest without test_metadata Signed-off-by: NJA010 --------- Signed-off-by: NJA010 --- .../common/provider/dbt/processor.py | 12 +- .../dbt/no_test_metadata/dbt_project.yml | 38 + .../tests/dbt/no_test_metadata/profiles.yml | 12 + .../tests/dbt/no_test_metadata/result.json | 1042 ++++ .../dbt/no_test_metadata/target/manifest.json | 5319 +++++++++++++++++ .../no_test_metadata/target/run_results.json | 378 ++ .../common/tests/dbt/test_dbt_local.py | 13 +- 7 files changed, 6809 insertions(+), 5 deletions(-) create mode 100644 integration/common/tests/dbt/no_test_metadata/dbt_project.yml create mode 100644 integration/common/tests/dbt/no_test_metadata/profiles.yml create mode 100644 integration/common/tests/dbt/no_test_metadata/result.json create mode 100644 integration/common/tests/dbt/no_test_metadata/target/manifest.json create mode 100644 integration/common/tests/dbt/no_test_metadata/target/run_results.json diff --git a/integration/common/openlineage/common/provider/dbt/processor.py b/integration/common/openlineage/common/provider/dbt/processor.py index 7db270ceb3..aba7ff9940 100644 --- a/integration/common/openlineage/common/provider/dbt/processor.py +++ b/integration/common/openlineage/common/provider/dbt/processor.py @@ -384,11 +384,19 @@ def parse_assertions( if node.startswith("model.") or node.startswith("source."): model_node = node + if self.manifest_version >= 12: + name = test_node["name"] + node_columns = test_node + + else: + name = test_node["test_metadata"]["name"] + node_columns = test_node["test_metadata"] + assertions[model_node].append( data_quality_assertions_dataset.Assertion( - assertion=test_node["test_metadata"]["name"], + assertion=name, success=True if run["status"] == "pass" else False, - column=get_from_nullable_chain(test_node["test_metadata"], ["kwargs", "column_name"]), + column=get_from_nullable_chain(node_columns, ["kwargs", "column_name"]), ) ) diff --git a/integration/common/tests/dbt/no_test_metadata/dbt_project.yml b/integration/common/tests/dbt/no_test_metadata/dbt_project.yml new file mode 100644 index 0000000000..07012acaa2 --- /dev/null +++ b/integration/common/tests/dbt/no_test_metadata/dbt_project.yml @@ -0,0 +1,38 @@ + +# Name your project! Project names should contain only lowercase characters +# and underscores. 
A good package name should reflect your organization's +# name or the intended use of these models +name: 'dbt_large_test' +version: '1.0.0' +config-version: 2 + +# This setting configures which "profile" 1t uses for this project. +profile: 'snowflake' + +# These configurations specify where dbt should look for different types of files. +# The `source-paths` config, for example, states that models in this project can be +# found in the "models/" directory. You probably won't need to change these! +source-paths: ["models"] +analysis-paths: ["analysis"] +test-paths: ["tests"] +data-paths: ["data"] +macro-paths: ["macros"] +snapshot-paths: ["snapshots"] + +target-path: "target" # directory which will store compiled SQL files +clean-targets: # directories to be removed by `dbt clean` + - "target" + - "dbt_modules" + + +# Configuring models +# Full documentation: https://docs.getdbt.com/docs/configuring-models + +# In this example config, we tell dbt to build all models in the example/ directory +# as tables. These settings can be overridden in the individual model files +# using the {% raw %} `{{ config(...) }}` {% endraw %} macro. +models: + dbt_large_test: + # Applies to all files under models/example/ + example: + materialized: view diff --git a/integration/common/tests/dbt/no_test_metadata/profiles.yml b/integration/common/tests/dbt/no_test_metadata/profiles.yml new file mode 100644 index 0000000000..0f2ce7e23b --- /dev/null +++ b/integration/common/tests/dbt/no_test_metadata/profiles.yml @@ -0,0 +1,12 @@ +snowflake: + target: dev + outputs: + dev: + type: snowflake + account: ASDF1234.eu-central-1 + user: test + password: password + role: sysadmin + database: DEMO_DB + warehouse: COMPUTE_WH + schema: public diff --git a/integration/common/tests/dbt/no_test_metadata/result.json b/integration/common/tests/dbt/no_test_metadata/result.json new file mode 100644 index 0000000000..a5dc23405a --- /dev/null +++ b/integration/common/tests/dbt/no_test_metadata/result.json @@ -0,0 +1,1042 @@ +[ + { + "eventTime": "2021-08-25T11:00:25.277467+00:00", + "producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "schemaURL": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunEvent", + "run": { + "runId": "6edf42ed-d8d0-454a-b819-d09b9067ff99", + "facets": { + "dbt_version": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://github.com/OpenLineage/OpenLineage/tree/main/integration/common/openlineage/schema/dbt-version-run-facet.json", + "version": "1.8.5" + }, + "parent": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/ParentRunFacet.json#/$defs/ParentRunFacet", + "run": { + "runId": "f99310b4-3c3c-1a1a-2b2b-c1b95c24ff11" + }, + "job": { + "namespace": "dbt", + "name": "dbt-job-name" + } + } + } + }, + "job": { + "namespace": "dbt-test-namespace", + "name": "DEMO_DB.public.jaffle_shop.customers.test", + "facets": { + "jobType": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/2-0-3/JobTypeJobFacet.json#/$defs/JobTypeJobFacet", + "_deleted": null, + "processingType": "BATCH", + "integration": "DBT", + "jobType": "TEST" + } + } + }, + "eventType": "START", + "inputs": [ + { + "namespace": "snowflake://ASDF1234.eu-central-1.aws", + "name": "DEMO_DB.public.customers", + "facets": { + "dataQualityAssertions": { + 
"_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_customers_customer_id", + "success": false, + "column": null + }, + { + "assertion": "not_null_customers_customer_id", + "success": false, + "column": null + } + ] + } + }, + "inputFacets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_customers_customer_id", + "success": false, + "column": null + }, + { + "assertion": "not_null_customers_customer_id", + "success": false, + "column": null + } + ] + } + } + } + ], + "outputs": [] + }, + { + "eventTime": "2021-08-25T11:00:25.277467+00:00", + "producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "schemaURL": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunEvent", + "run": { + "runId": "1a69c0a7-04bb-408b-980e-cbbfb1831ef7", + "facets": { + "dbt_version": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://github.com/OpenLineage/OpenLineage/tree/main/integration/common/openlineage/schema/dbt-version-run-facet.json", + "version": "1.8.5" + }, + "parent": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/ParentRunFacet.json#/$defs/ParentRunFacet", + "run": { + "runId": "f99310b4-3c3c-1a1a-2b2b-c1b95c24ff11" + }, + "job": { + "namespace": "dbt", + "name": "dbt-job-name" + } + } + } + }, + "job": { + "namespace": "dbt-test-namespace", + "name": "DEMO_DB.public.jaffle_shop.orders.test", + "facets": { + "jobType": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/2-0-3/JobTypeJobFacet.json#/$defs/JobTypeJobFacet", + "_deleted": null, + "processingType": "BATCH", + "integration": "DBT", + "jobType": "TEST" + } + } + }, + "eventType": "START", + "inputs": [ + { + "namespace": "snowflake://ASDF1234.eu-central-1.aws", + "name": "DEMO_DB.public.orders", + "facets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_customer_id", + "success": true, + "column": null + }, + { + "assertion": "relationships_orders_customer_id__customer_id__ref_customers_", + "success": true, + "column": null + }, + { + "assertion": "accepted_values_orders_status__placed__shipped__completed__return_pending__returned", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_credit_card_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_coupon_amount", + "success": true, + "column": null + }, + { + 
"assertion": "not_null_orders_bank_transfer_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_gift_card_amount", + "success": true, + "column": null + } + ] + } + }, + "inputFacets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_customer_id", + "success": true, + "column": null + }, + { + "assertion": "relationships_orders_customer_id__customer_id__ref_customers_", + "success": true, + "column": null + }, + { + "assertion": "accepted_values_orders_status__placed__shipped__completed__return_pending__returned", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_credit_card_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_coupon_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_bank_transfer_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_gift_card_amount", + "success": true, + "column": null + } + ] + } + } + } + ], + "outputs": [] + }, + { + "eventTime": "2021-08-25T11:00:25.277467+00:00", + "producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "schemaURL": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunEvent", + "run": { + "runId": "f99310b4-339a-4381-ad3e-c1b95c24ff11", + "facets": { + "dbt_version": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://github.com/OpenLineage/OpenLineage/tree/main/integration/common/openlineage/schema/dbt-version-run-facet.json", + "version": "1.8.5" + }, + "parent": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/ParentRunFacet.json#/$defs/ParentRunFacet", + "run": { + "runId": "f99310b4-3c3c-1a1a-2b2b-c1b95c24ff11" + }, + "job": { + "namespace": "dbt", + "name": "dbt-job-name" + } + } + } + }, + "job": { + "namespace": "dbt-test-namespace", + "name": "DEMO_DB.public.jaffle_shop.stg_customers.test", + "facets": { + "jobType": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/2-0-3/JobTypeJobFacet.json#/$defs/JobTypeJobFacet", + "_deleted": null, + "processingType": "BATCH", + "integration": "DBT", + "jobType": "TEST" + } + } + }, + "eventType": "START", + "inputs": [ + { + "namespace": "snowflake://ASDF1234.eu-central-1.aws", + "name": "DEMO_DB.public.stg_customers", + "facets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_stg_customers_customer_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_stg_customers_customer_id", + "success": true, + "column": null + } + ] + } + }, + "inputFacets": { + 
"dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_stg_customers_customer_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_stg_customers_customer_id", + "success": true, + "column": null + } + ] + } + } + } + ], + "outputs": [] + }, + { + "eventTime": "2021-08-25T11:00:25.277467+00:00", + "producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "schemaURL": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunEvent", + "run": { + "runId": "c11f2efd-4415-45fc-8081-10d2aaa594d2", + "facets": { + "dbt_version": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://github.com/OpenLineage/OpenLineage/tree/main/integration/common/openlineage/schema/dbt-version-run-facet.json", + "version": "1.8.5" + }, + "parent": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/ParentRunFacet.json#/$defs/ParentRunFacet", + "run": { + "runId": "f99310b4-3c3c-1a1a-2b2b-c1b95c24ff11" + }, + "job": { + "namespace": "dbt", + "name": "dbt-job-name" + } + } + } + }, + "job": { + "namespace": "dbt-test-namespace", + "name": "DEMO_DB.public.jaffle_shop.stg_payments.test", + "facets": { + "jobType": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/2-0-3/JobTypeJobFacet.json#/$defs/JobTypeJobFacet", + "_deleted": null, + "processingType": "BATCH", + "integration": "DBT", + "jobType": "TEST" + } + } + }, + "eventType": "START", + "inputs": [ + { + "namespace": "snowflake://ASDF1234.eu-central-1.aws", + "name": "DEMO_DB.public.stg_payments", + "facets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_stg_payments_payment_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_stg_payments_payment_id", + "success": true, + "column": null + }, + { + "assertion": "accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card", + "success": true, + "column": null + } + ] + } + }, + "inputFacets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_stg_payments_payment_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_stg_payments_payment_id", + "success": true, + "column": null + }, + { + "assertion": "accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card", + "success": true, + "column": null + } + ] + } + } + } + ], + "outputs": [] + }, + { + "eventTime": "2021-08-25T11:00:25.277467+00:00", + "producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "schemaURL": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunEvent", + 
"run": { + "runId": "b901441a-7b4a-4a97-aa61-a200106b3ce3", + "facets": { + "dbt_version": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://github.com/OpenLineage/OpenLineage/tree/main/integration/common/openlineage/schema/dbt-version-run-facet.json", + "version": "1.8.5" + }, + "parent": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/ParentRunFacet.json#/$defs/ParentRunFacet", + "run": { + "runId": "f99310b4-3c3c-1a1a-2b2b-c1b95c24ff11" + }, + "job": { + "namespace": "dbt", + "name": "dbt-job-name" + } + } + } + }, + "job": { + "namespace": "dbt-test-namespace", + "name": "DEMO_DB.public.jaffle_shop.stg_orders.test", + "facets": { + "jobType": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/2-0-3/JobTypeJobFacet.json#/$defs/JobTypeJobFacet", + "_deleted": null, + "processingType": "BATCH", + "integration": "DBT", + "jobType": "TEST" + } + } + }, + "eventType": "START", + "inputs": [ + { + "namespace": "snowflake://ASDF1234.eu-central-1.aws", + "name": "DEMO_DB.public.stg_orders", + "facets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_stg_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_stg_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned", + "success": true, + "column": null + } + ] + } + }, + "inputFacets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_stg_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_stg_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned", + "success": true, + "column": null + } + ] + } + } + } + ], + "outputs": [] + }, + { + "eventTime": "2021-08-25T11:00:25.277467+00:00", + "producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "schemaURL": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunEvent", + "run": { + "runId": "6edf42ed-d8d0-454a-b819-d09b9067ff99", + "facets": { + "dbt_version": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://github.com/OpenLineage/OpenLineage/tree/main/integration/common/openlineage/schema/dbt-version-run-facet.json", + "version": "1.8.5" + }, + "parent": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/ParentRunFacet.json#/$defs/ParentRunFacet", + "run": { + "runId": "f99310b4-3c3c-1a1a-2b2b-c1b95c24ff11" + }, + "job": { + "namespace": "dbt", + "name": "dbt-job-name" + } + } + } + }, + "job": { + "namespace": "dbt-test-namespace", + "name": 
"DEMO_DB.public.jaffle_shop.customers.test", + "facets": { + "jobType": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/2-0-3/JobTypeJobFacet.json#/$defs/JobTypeJobFacet", + "_deleted": null, + "processingType": "BATCH", + "integration": "DBT", + "jobType": "TEST" + } + } + }, + "eventType": "COMPLETE", + "inputs": [ + { + "namespace": "snowflake://ASDF1234.eu-central-1.aws", + "name": "DEMO_DB.public.customers", + "facets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_customers_customer_id", + "success": false, + "column": null + }, + { + "assertion": "not_null_customers_customer_id", + "success": false, + "column": null + } + ] + } + }, + "inputFacets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_customers_customer_id", + "success": false, + "column": null + }, + { + "assertion": "not_null_customers_customer_id", + "success": false, + "column": null + } + ] + } + } + } + ], + "outputs": [] + }, + { + "eventTime": "2021-08-25T11:00:25.277467+00:00", + "producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "schemaURL": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunEvent", + "run": { + "runId": "1a69c0a7-04bb-408b-980e-cbbfb1831ef7", + "facets": { + "dbt_version": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://github.com/OpenLineage/OpenLineage/tree/main/integration/common/openlineage/schema/dbt-version-run-facet.json", + "version": "1.8.5" + }, + "parent": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/ParentRunFacet.json#/$defs/ParentRunFacet", + "run": { + "runId": "f99310b4-3c3c-1a1a-2b2b-c1b95c24ff11" + }, + "job": { + "namespace": "dbt", + "name": "dbt-job-name" + } + } + } + }, + "job": { + "namespace": "dbt-test-namespace", + "name": "DEMO_DB.public.jaffle_shop.orders.test", + "facets": { + "jobType": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/2-0-3/JobTypeJobFacet.json#/$defs/JobTypeJobFacet", + "_deleted": null, + "processingType": "BATCH", + "integration": "DBT", + "jobType": "TEST" + } + } + }, + "eventType": "COMPLETE", + "inputs": [ + { + "namespace": "snowflake://ASDF1234.eu-central-1.aws", + "name": "DEMO_DB.public.orders", + "facets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_customer_id", + "success": true, + "column": null + 
}, + { + "assertion": "relationships_orders_customer_id__customer_id__ref_customers_", + "success": true, + "column": null + }, + { + "assertion": "accepted_values_orders_status__placed__shipped__completed__return_pending__returned", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_credit_card_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_coupon_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_bank_transfer_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_gift_card_amount", + "success": true, + "column": null + } + ] + } + }, + "inputFacets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_customer_id", + "success": true, + "column": null + }, + { + "assertion": "relationships_orders_customer_id__customer_id__ref_customers_", + "success": true, + "column": null + }, + { + "assertion": "accepted_values_orders_status__placed__shipped__completed__return_pending__returned", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_credit_card_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_coupon_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_bank_transfer_amount", + "success": true, + "column": null + }, + { + "assertion": "not_null_orders_gift_card_amount", + "success": true, + "column": null + } + ] + } + } + } + ], + "outputs": [] + }, + { + "eventTime": "2021-08-25T11:00:25.277467+00:00", + "producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "schemaURL": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunEvent", + "run": { + "runId": "f99310b4-339a-4381-ad3e-c1b95c24ff11", + "facets": { + "dbt_version": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://github.com/OpenLineage/OpenLineage/tree/main/integration/common/openlineage/schema/dbt-version-run-facet.json", + "version": "1.8.5" + }, + "parent": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/ParentRunFacet.json#/$defs/ParentRunFacet", + "run": { + "runId": "f99310b4-3c3c-1a1a-2b2b-c1b95c24ff11" + }, + "job": { + "namespace": "dbt", + "name": "dbt-job-name" + } + } + } + }, + "job": { + "namespace": "dbt-test-namespace", + "name": "DEMO_DB.public.jaffle_shop.stg_customers.test", + "facets": { + "jobType": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/2-0-3/JobTypeJobFacet.json#/$defs/JobTypeJobFacet", + "_deleted": null, + "processingType": "BATCH", + "integration": "DBT", + "jobType": "TEST" + } + } + }, + "eventType": "COMPLETE", + "inputs": [ + { + "namespace": 
"snowflake://ASDF1234.eu-central-1.aws", + "name": "DEMO_DB.public.stg_customers", + "facets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_stg_customers_customer_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_stg_customers_customer_id", + "success": true, + "column": null + } + ] + } + }, + "inputFacets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_stg_customers_customer_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_stg_customers_customer_id", + "success": true, + "column": null + } + ] + } + } + } + ], + "outputs": [] + }, + { + "eventTime": "2021-08-25T11:00:25.277467+00:00", + "producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "schemaURL": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunEvent", + "run": { + "runId": "c11f2efd-4415-45fc-8081-10d2aaa594d2", + "facets": { + "dbt_version": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://github.com/OpenLineage/OpenLineage/tree/main/integration/common/openlineage/schema/dbt-version-run-facet.json", + "version": "1.8.5" + }, + "parent": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/ParentRunFacet.json#/$defs/ParentRunFacet", + "run": { + "runId": "f99310b4-3c3c-1a1a-2b2b-c1b95c24ff11" + }, + "job": { + "namespace": "dbt", + "name": "dbt-job-name" + } + } + } + }, + "job": { + "namespace": "dbt-test-namespace", + "name": "DEMO_DB.public.jaffle_shop.stg_payments.test", + "facets": { + "jobType": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/2-0-3/JobTypeJobFacet.json#/$defs/JobTypeJobFacet", + "_deleted": null, + "processingType": "BATCH", + "integration": "DBT", + "jobType": "TEST" + } + } + }, + "eventType": "COMPLETE", + "inputs": [ + { + "namespace": "snowflake://ASDF1234.eu-central-1.aws", + "name": "DEMO_DB.public.stg_payments", + "facets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_stg_payments_payment_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_stg_payments_payment_id", + "success": true, + "column": null + }, + { + "assertion": "accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card", + "success": true, + "column": null + } + ] + } + }, + "inputFacets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + 
"assertion": "unique_stg_payments_payment_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_stg_payments_payment_id", + "success": true, + "column": null + }, + { + "assertion": "accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card", + "success": true, + "column": null + } + ] + } + } + } + ], + "outputs": [] + }, + { + "eventTime": "2021-08-25T11:00:25.277467+00:00", + "producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "schemaURL": "https://openlineage.io/spec/2-0-2/OpenLineage.json#/$defs/RunEvent", + "run": { + "runId": "b901441a-7b4a-4a97-aa61-a200106b3ce3", + "facets": { + "dbt_version": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://github.com/OpenLineage/OpenLineage/tree/main/integration/common/openlineage/schema/dbt-version-run-facet.json", + "version": "1.8.5" + }, + "parent": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/ParentRunFacet.json#/$defs/ParentRunFacet", + "run": { + "runId": "f99310b4-3c3c-1a1a-2b2b-c1b95c24ff11" + }, + "job": { + "namespace": "dbt", + "name": "dbt-job-name" + } + } + } + }, + "job": { + "namespace": "dbt-test-namespace", + "name": "DEMO_DB.public.jaffle_shop.stg_orders.test", + "facets": { + "jobType": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/2-0-3/JobTypeJobFacet.json#/$defs/JobTypeJobFacet", + "_deleted": null, + "processingType": "BATCH", + "integration": "DBT", + "jobType": "TEST" + } + } + }, + "eventType": "COMPLETE", + "inputs": [ + { + "namespace": "snowflake://ASDF1234.eu-central-1.aws", + "name": "DEMO_DB.public.stg_orders", + "facets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_stg_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_stg_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned", + "success": true, + "column": null + } + ] + } + }, + "inputFacets": { + "dataQualityAssertions": { + "_producer": "https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", + "_schemaURL": "https://openlineage.io/spec/facets/1-0-1/DataQualityAssertionsDatasetFacet.json#/$defs/DataQualityAssertionsDatasetFacet", + "assertions": [ + { + "assertion": "unique_stg_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "not_null_stg_orders_order_id", + "success": true, + "column": null + }, + { + "assertion": "accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned", + "success": true, + "column": null + } + ] + } + } + } + ], + "outputs": [] + } +] \ No newline at end of file diff --git a/integration/common/tests/dbt/no_test_metadata/target/manifest.json b/integration/common/tests/dbt/no_test_metadata/target/manifest.json new file mode 100644 index 0000000000..6e1cb6229d --- /dev/null +++ b/integration/common/tests/dbt/no_test_metadata/target/manifest.json @@ -0,0 +1,5319 @@ +{ + "metadata": { + "dbt_schema_version": 
"https://schemas.getdbt.com/dbt/manifest/v12.json", + "dbt_version": "1.8.6", + "generated_at": "2021-07-05T11:43:09.384637Z", + "invocation_id": "ebb384bf-f2f3-4303-bb1b-98b1daa6d2d4", + "env": {}, + "project_id": "06e5b98c2db46f8a72cc4f66410e9b3b", + "user_id": "7bae5953-769e-4aa1-81f6-55a82ac4d4d4", + "send_anonymous_usage_stats": true, + "adapter_type": "snowflake" + }, + "nodes": { + "model.jaffle_shop.customers": { + "raw_sql": "with customers as (\n\n select * from {{ ref('stg_customers') }}\n\n),\n\norders as (\n\n select * from {{ ref('stg_orders') }}\n\n),\n\npayments as (\n\n select * from {{ ref('stg_payments') }}\n\n),\n\ncustomer_orders as (\n\n select\n customer_id,\n\n min(order_date) as first_order,\n max(order_date) as most_recent_order,\n count(order_id) as number_of_orders\n from orders\n\n group by 1\n\n),\n\ncustomer_payments as (\n\n select\n orders.customer_id,\n sum(amount) as total_amount\n\n from payments\n\n left join orders using (order_id)\n\n group by 1\n\n),\n\nfinal as (\n\n select\n customers.customer_id,\n customers.first_name,\n customers.last_name,\n customer_orders.first_order,\n customer_orders.most_recent_order,\n customer_orders.number_of_orders,\n customer_payments.total_amount as customer_lifetime_value\n\n from customers\n\n left join customer_orders using (customer_id)\n\n left join customer_payments using (customer_id)\n\n)\n\nselect * from final", + "compiled": true, + "resource_type": "model", + "depends_on": { + "macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt_snowflake.unset_query_tag"], + "nodes": ["model.jaffle_shop.stg_customers", "model.jaffle_shop.stg_orders", "model.jaffle_shop.stg_payments"] + }, + "config": { + "enabled": true, + "materialized": "table", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": null, + "database": null, + "tags": [], + "full_refresh": null, + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public", + "fqn": ["jaffle_shop", "customers"], + "unique_id": "model.jaffle_shop.customers", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "customers.sql", + "original_file_path": "models/customers.sql", + "name": "customers", + "alias": "customers", + "checksum": { + "name": "sha256", + "checksum": "7f193a2c3af2faa53e0bb7b75d2663f39db8c6b3913e9cafd245dc62f98a8d09" + }, + "tags": [], + "refs": [ + ["stg_customers"], + ["stg_orders"], + ["stg_payments"] + ], + "sources": [], + "description": "This table has basic information about a customer, as well as some derived facts based on a customer's orders", + "columns": { + "customer_id": { + "name": "customer_id", + "description": "This is a unique identifier for a customer", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "first_name": { + "name": "first_name", + "description": "Customer's first name. PII.", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "last_name": { + "name": "last_name", + "description": "Customer's last name. 
PII.", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "first_order": { + "name": "first_order", + "description": "Date (UTC) of a customer's first order", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "most_recent_order": { + "name": "most_recent_order", + "description": "Date (UTC) of a customer's most recent order", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "number_of_orders": { + "name": "number_of_orders", + "description": "Count of the number of orders a customer has placed", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "total_order_amount": { + "name": "total_order_amount", + "description": "Total value (AUD) of a customer's orders", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + } + }, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": "jaffle_shop://models/schema.yml", + "compiled_path": "target/compiled/jaffle_shop/models/customers.sql", + "build_path": "target/run/jaffle_shop/models/customers.sql", + "deferred": false, + "unrendered_config": { + "materialized": "table" + }, + "created_at": 1625485389, + "compiled_sql": "with customers as (\n\n select * from DEMO_DB.public.stg_customers\n\n),\n\norders as (\n\n select * from DEMO_DB.public.stg_orders\n\n),\n\npayments as (\n\n select * from DEMO_DB.public.stg_payments\n\n),\n\ncustomer_orders as (\n\n select\n customer_id,\n\n min(order_date) as first_order,\n max(order_date) as most_recent_order,\n count(order_id) as number_of_orders\n from orders\n\n group by 1\n\n),\n\ncustomer_payments as (\n\n select\n orders.customer_id,\n sum(amount) as total_amount\n\n from payments\n\n left join orders using (order_id)\n\n group by 1\n\n),\n\nfinal as (\n\n select\n customers.customer_id,\n customers.first_name,\n customers.last_name,\n customer_orders.first_order,\n customer_orders.most_recent_order,\n customer_orders.number_of_orders,\n customer_payments.total_amount as customer_lifetime_value\n\n from customers\n\n left join customer_orders using (customer_id)\n\n left join customer_payments using (customer_id)\n\n)\n\nselect * from final", + "extra_ctes_injected": true, + "extra_ctes": [], + "relation_name": "DEMO_DB.public.customers" + }, + "model.jaffle_shop.orders": { + "raw_sql": "{% set payment_methods = ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] %}\n\nwith orders as (\n\n select * from {{ ref('stg_orders') }}\n\n),\n\npayments as (\n\n select * from {{ ref('stg_payments') }}\n\n),\n\norder_payments as (\n\n select\n order_id,\n\n {% for payment_method in payment_methods -%}\n sum(case when payment_method = '{{ payment_method }}' then amount else 0 end) as {{ payment_method }}_amount,\n {% endfor -%}\n\n sum(amount) as total_amount\n\n from payments\n\n group by 1\n\n),\n\nfinal as (\n\n select\n orders.order_id,\n orders.customer_id,\n orders.order_date,\n orders.status,\n\n {% for payment_method in payment_methods -%}\n\n order_payments.{{ payment_method }}_amount,\n\n {% endfor -%}\n\n order_payments.total_amount as amount\n\n from orders\n\n left join order_payments using (order_id)\n\n)\n\nselect * from final", + "compiled": true, + "resource_type": "model", + "depends_on": { + "macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt_snowflake.unset_query_tag"], + "nodes": ["model.jaffle_shop.stg_orders", "model.jaffle_shop.stg_payments"] + }, + "config": { + "enabled": true, + "materialized": "table", 
+ "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": null, + "database": null, + "tags": [], + "full_refresh": null, + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public", + "fqn": ["jaffle_shop", "orders"], + "unique_id": "model.jaffle_shop.orders", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "orders.sql", + "original_file_path": "models/orders.sql", + "name": "orders", + "alias": "orders", + "checksum": { + "name": "sha256", + "checksum": "ec3e8884f18110dd6d9b1ababdd85a6c04bf665ee0f57cade273e442f90e9994" + }, + "tags": [], + "refs": [ + ["stg_orders"], + ["stg_payments"] + ], + "sources": [], + "description": "This table has basic information about orders, as well as some derived facts based on payments", + "columns": { + "order_id": { + "name": "order_id", + "description": "This is a unique identifier for an order", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "customer_id": { + "name": "customer_id", + "description": "Foreign key to the customers table", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "order_date": { + "name": "order_date", + "description": "Date (UTC) that the order was placed", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "status": { + "name": "status", + "description": "Orders can be one of the following statuses:\n\n| status | description |\n|----------------|------------------------------------------------------------------------------------------------------------------------|\n| placed | The order has been placed but has not yet left the warehouse |\n| shipped | The order has ben shipped to the customer and is currently in transit |\n| completed | The order has been received by the customer |\n| return_pending | The customer has indicated that they would like to return the order, but it has not yet been received at the warehouse |\n| returned | The order has been returned by the customer and received at the warehouse |", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "amount": { + "name": "amount", + "description": "Total amount (AUD) of the order", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "credit_card_amount": { + "name": "credit_card_amount", + "description": "Amount of the order (AUD) paid for by credit card", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "coupon_amount": { + "name": "coupon_amount", + "description": "Amount of the order (AUD) paid for by coupon", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "bank_transfer_amount": { + "name": "bank_transfer_amount", + "description": "Amount of the order (AUD) paid for by bank transfer", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "gift_card_amount": { + "name": "gift_card_amount", + "description": "Amount of the order (AUD) paid for by gift card", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + } + }, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": "jaffle_shop://models/schema.yml", + "compiled_path": "target/compiled/jaffle_shop/models/orders.sql", + "build_path": "target/run/jaffle_shop/models/orders.sql", + "deferred": false, + "unrendered_config": { + "materialized": "table" + }, + "created_at": 1625485389, + "compiled_sql": "\n\nwith orders as (\n\n select * from DEMO_DB.public.stg_orders\n\n),\n\npayments as (\n\n select * from 
DEMO_DB.public.stg_payments\n\n),\n\norder_payments as (\n\n select\n order_id,\n\n sum(case when payment_method = 'credit_card' then amount else 0 end) as credit_card_amount,\n sum(case when payment_method = 'coupon' then amount else 0 end) as coupon_amount,\n sum(case when payment_method = 'bank_transfer' then amount else 0 end) as bank_transfer_amount,\n sum(case when payment_method = 'gift_card' then amount else 0 end) as gift_card_amount,\n sum(amount) as total_amount\n\n from payments\n\n group by 1\n\n),\n\nfinal as (\n\n select\n orders.order_id,\n orders.customer_id,\n orders.order_date,\n orders.status,\n\n order_payments.credit_card_amount,\n\n order_payments.coupon_amount,\n\n order_payments.bank_transfer_amount,\n\n order_payments.gift_card_amount,\n\n order_payments.total_amount as amount\n\n from orders\n\n left join order_payments using (order_id)\n\n)\n\nselect * from final", + "extra_ctes_injected": true, + "extra_ctes": [], + "relation_name": "DEMO_DB.public.orders" + }, + "model.jaffle_shop.stg_customers": { + "raw_sql": "with source as (\n\n {#-\n Normally we would select from the table here, but we are using seeds to load\n our data in this project\n #}\n select * from {{ ref('raw_customers') }}\n\n),\n\nrenamed as (\n\n select\n id as customer_id,\n first_name,\n last_name\n\n from source\n\n)\n\nselect * from renamed", + "compiled": true, + "resource_type": "model", + "depends_on": { + "macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.create_or_replace_view", "macro.dbt.persist_docs"], + "nodes": ["seed.jaffle_shop.raw_customers"] + }, + "config": { + "enabled": true, + "materialized": "view", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": null, + "database": null, + "tags": [], + "full_refresh": null, + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public", + "fqn": ["jaffle_shop", "staging", "stg_customers"], + "unique_id": "model.jaffle_shop.stg_customers", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "staging/stg_customers.sql", + "original_file_path": "models/staging/stg_customers.sql", + "name": "stg_customers", + "alias": "stg_customers", + "checksum": { + "name": "sha256", + "checksum": "6f18a29204dad1de6dbb0c288144c4990742e0a1e065c3b2a67b5f98334c22ba" + }, + "tags": [], + "refs": [ + ["raw_customers"] + ], + "sources": [], + "description": "", + "columns": { + "customer_id": { + "name": "customer_id", + "description": "", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + } + }, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": "jaffle_shop://models/staging/schema.yml", + "compiled_path": "target/compiled/jaffle_shop/models/staging/stg_customers.sql", + "build_path": "target/run/jaffle_shop/models/staging/stg_customers.sql", + "deferred": false, + "unrendered_config": { + "materialized": "view" + }, + "created_at": 1625485389, + "compiled_sql": "with source as (\n select * from DEMO_DB.public.raw_customers\n\n),\n\nrenamed as (\n\n select\n id as customer_id,\n first_name,\n last_name\n\n from source\n\n)\n\nselect * from renamed", + "extra_ctes_injected": true, + "extra_ctes": [], + "relation_name": "DEMO_DB.public.stg_customers" + }, + "model.jaffle_shop.stg_payments": { + "raw_sql": "with source as (\n \n {#-\n Normally we would select from the table here, but we are using seeds to load\n our data in this project\n #}\n select * from {{ ref('raw_payments') }}\n\n),\n\nrenamed as (\n\n 
select\n id as payment_id,\n order_id,\n payment_method,\n\n --`amount` is currently stored in cents, so we convert it to dollars\n amount / 100 as amount\n\n from source\n\n)\n\nselect * from renamed", + "compiled": true, + "resource_type": "model", + "depends_on": { + "macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.create_or_replace_view", "macro.dbt.persist_docs"], + "nodes": ["seed.jaffle_shop.raw_payments"] + }, + "config": { + "enabled": true, + "materialized": "view", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": null, + "database": null, + "tags": [], + "full_refresh": null, + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public", + "fqn": ["jaffle_shop", "staging", "stg_payments"], + "unique_id": "model.jaffle_shop.stg_payments", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "staging/stg_payments.sql", + "original_file_path": "models/staging/stg_payments.sql", + "name": "stg_payments", + "alias": "stg_payments", + "checksum": { + "name": "sha256", + "checksum": "113502ed19f04efb2af0629ff139f57f7463347b6d5218f3b80a8d128cc96852" + }, + "tags": [], + "refs": [ + ["raw_payments"] + ], + "sources": [], + "description": "", + "columns": { + "payment_id": { + "name": "payment_id", + "description": "", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "payment_method": { + "name": "payment_method", + "description": "", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + } + }, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": "jaffle_shop://models/staging/schema.yml", + "compiled_path": "target/compiled/jaffle_shop/models/staging/stg_payments.sql", + "build_path": "target/run/jaffle_shop/models/staging/stg_payments.sql", + "deferred": false, + "unrendered_config": { + "materialized": "view" + }, + "created_at": 1625485389, + "compiled_sql": "with source as (\n select * from DEMO_DB.public.raw_payments\n\n),\n\nrenamed as (\n\n select\n id as payment_id,\n order_id,\n payment_method,\n\n --`amount` is currently stored in cents, so we convert it to dollars\n amount / 100 as amount\n\n from source\n\n)\n\nselect * from renamed", + "extra_ctes_injected": true, + "extra_ctes": [], + "relation_name": "DEMO_DB.public.stg_payments" + }, + "model.jaffle_shop.stg_orders": { + "raw_sql": "with source as (\n\n {#-\n Normally we would select from the table here, but we are using seeds to load\n our data in this project\n #}\n select * from {{ ref('raw_orders') }}\n\n),\n\nrenamed as (\n\n select\n id as order_id,\n user_id as customer_id,\n order_date,\n status\n\n from source\n\n)\n\nselect * from renamed", + "compiled": true, + "resource_type": "model", + "depends_on": { + "macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.create_or_replace_view", "macro.dbt.persist_docs"], + "nodes": ["seed.jaffle_shop.raw_orders"] + }, + "config": { + "enabled": true, + "materialized": "view", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": null, + "database": null, + "tags": [], + "full_refresh": null, + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public", + "fqn": ["jaffle_shop", "staging", "stg_orders"], + "unique_id": "model.jaffle_shop.stg_orders", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "staging/stg_orders.sql", + "original_file_path": "models/staging/stg_orders.sql", + 
"name": "stg_orders", + "alias": "stg_orders", + "checksum": { + "name": "sha256", + "checksum": "afffa9cbc57e5fd2cf5898ebf571d444a62c9d6d7929d8133d30567fb9a2ce97" + }, + "tags": [], + "refs": [ + ["raw_orders"] + ], + "sources": [], + "description": "", + "columns": { + "order_id": { + "name": "order_id", + "description": "", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + }, + "status": { + "name": "status", + "description": "", + "meta": {}, + "data_type": null, + "quote": null, + "tags": [] + } + }, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": "jaffle_shop://models/staging/schema.yml", + "compiled_path": "target/compiled/jaffle_shop/models/staging/stg_orders.sql", + "build_path": "target/run/jaffle_shop/models/staging/stg_orders.sql", + "deferred": false, + "unrendered_config": { + "materialized": "view" + }, + "created_at": 1625485389, + "compiled_sql": "with source as (\n select * from DEMO_DB.public.raw_orders\n\n),\n\nrenamed as (\n\n select\n id as order_id,\n user_id as customer_id,\n order_date,\n status\n\n from source\n\n)\n\nselect * from renamed", + "extra_ctes_injected": true, + "extra_ctes": [], + "relation_name": "DEMO_DB.public.stg_orders" + }, + "seed.jaffle_shop.raw_customers": { + "raw_sql": "", + "resource_type": "seed", + "depends_on": { + "macros": [], + "nodes": [] + }, + "config": { + "enabled": true, + "materialized": "seed", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": null, + "database": null, + "tags": [], + "full_refresh": null, + "quote_columns": null, + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public", + "fqn": ["jaffle_shop", "raw_customers"], + "unique_id": "seed.jaffle_shop.raw_customers", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "raw_customers.csv", + "original_file_path": "data/raw_customers.csv", + "name": "raw_customers", + "alias": "raw_customers", + "checksum": { + "name": "sha256", + "checksum": "24579b4b26098d43265376f3c50be8b10faf8e8fd95f5508074f10f76a12671d" + }, + "tags": [], + "refs": [], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389 + }, + "seed.jaffle_shop.raw_orders": { + "raw_sql": "", + "resource_type": "seed", + "depends_on": { + "macros": [], + "nodes": [] + }, + "config": { + "enabled": true, + "materialized": "seed", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": null, + "database": null, + "tags": [], + "full_refresh": null, + "quote_columns": null, + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public", + "fqn": ["jaffle_shop", "raw_orders"], + "unique_id": "seed.jaffle_shop.raw_orders", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "raw_orders.csv", + "original_file_path": "data/raw_orders.csv", + "name": "raw_orders", + "alias": "raw_orders", + "checksum": { + "name": "sha256", + "checksum": "ee6c68d1639ec2b23a4495ec12475e09b8ed4b61e23ab0411ea7ec76648356f7" + }, + "tags": [], + "refs": [], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 
1625485389 + }, + "seed.jaffle_shop.raw_payments": { + "raw_sql": "", + "resource_type": "seed", + "depends_on": { + "macros": [], + "nodes": [] + }, + "config": { + "enabled": true, + "materialized": "seed", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": null, + "database": null, + "tags": [], + "full_refresh": null, + "quote_columns": null, + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public", + "fqn": ["jaffle_shop", "raw_payments"], + "unique_id": "seed.jaffle_shop.raw_payments", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "raw_payments.csv", + "original_file_path": "data/raw_payments.csv", + "name": "raw_payments", + "alias": "raw_payments", + "checksum": { + "name": "sha256", + "checksum": "03fd407f3135f84456431a923f22fc185a2154079e210c20b690e3ab11687d11" + }, + "tags": [], + "refs": [], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389 + }, + "test.jaffle_shop.unique_customers_customer_id.d48e126d80": { + "raw_sql": "{{ test_unique(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_unique"], + "nodes": ["model.jaffle_shop.customers"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "unique_customers_customer_id"], + "unique_id": "test.jaffle_shop.unique_customers_customer_id.d48e126d80", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/unique_customers_customer_id.sql", + "original_file_path": "models/schema.yml", + "name": "unique_customers_customer_id", + "alias": "unique_customers_customer_id", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["customers"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "customer_id" + }, + "test.jaffle_shop.not_null_customers_customer_id.923d2d910a": { + "raw_sql": "{{ test_not_null(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_not_null"], + "nodes": ["model.jaffle_shop.customers"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", 
"schema_test", "not_null_customers_customer_id"], + "unique_id": "test.jaffle_shop.not_null_customers_customer_id.923d2d910a", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/not_null_customers_customer_id.sql", + "original_file_path": "models/schema.yml", + "name": "not_null_customers_customer_id", + "alias": "not_null_customers_customer_id", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["customers"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "customer_id" + }, + "test.jaffle_shop.unique_orders_order_id.0d77ddcf59": { + "raw_sql": "{{ test_unique(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_unique"], + "nodes": ["model.jaffle_shop.orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "unique_orders_order_id"], + "unique_id": "test.jaffle_shop.unique_orders_order_id.0d77ddcf59", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/unique_orders_order_id.sql", + "original_file_path": "models/schema.yml", + "name": "unique_orders_order_id", + "alias": "unique_orders_order_id", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["orders"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "order_id" + }, + "test.jaffle_shop.not_null_orders_order_id.4daff5eed7": { + "raw_sql": "{{ test_not_null(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_not_null"], + "nodes": ["model.jaffle_shop.orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "not_null_orders_order_id"], + "unique_id": "test.jaffle_shop.not_null_orders_order_id.4daff5eed7", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/not_null_orders_order_id.sql", + "original_file_path": "models/schema.yml", + "name": "not_null_orders_order_id", + "alias": "not_null_orders_order_id", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ 
+ ["orders"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "order_id" + }, + "test.jaffle_shop.not_null_orders_customer_id.70722cc05f": { + "raw_sql": "{{ test_not_null(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_not_null"], + "nodes": ["model.jaffle_shop.orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "not_null_orders_customer_id"], + "unique_id": "test.jaffle_shop.not_null_orders_customer_id.70722cc05f", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/not_null_orders_customer_id.sql", + "original_file_path": "models/schema.yml", + "name": "not_null_orders_customer_id", + "alias": "not_null_orders_customer_id", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["orders"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "customer_id" + }, + "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.e153c026e4": { + "raw_sql": "{{ test_relationships(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_relationships", "macro.dbt.default__test_relationships"], + "nodes": ["model.jaffle_shop.customers", "model.jaffle_shop.orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "relationships_orders_customer_id__customer_id__ref_customers_"], + "unique_id": "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.e153c026e4", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/relationships_orders_customer_id__customer_id__ref_customers_.sql", + "original_file_path": "models/schema.yml", + "name": "relationships_orders_customer_id__customer_id__ref_customers_", + "alias": "relationships_orders_customer_id__customer_id__ref_customers_", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["customers"], + ["orders"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, 
+ "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "customer_id" + }, + "test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.2e6d271b93": { + "raw_sql": "{{ test_accepted_values(**_dbt_schema_test_kwargs) }}{{ config(alias=\"accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758\") }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_accepted_values", "macro.dbt.default__test_accepted_values"], + "nodes": ["model.jaffle_shop.orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758", + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "accepted_values_orders_status__placed__shipped__completed__return_pending__returned"], + "unique_id": "test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.2e6d271b93", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758.sql", + "original_file_path": "models/schema.yml", + "name": "accepted_values_orders_status__placed__shipped__completed__return_pending__returned", + "alias": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["orders"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": { + "alias": "accepted_values_orders_1ce6ab157c285f7cd2ac656013faf758" + }, + "created_at": 1625485389, + "column_name": "status" + }, + "test.jaffle_shop.not_null_orders_amount.f7bae8de1b": { + "raw_sql": "{{ test_not_null(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_not_null"], + "nodes": ["model.jaffle_shop.orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "not_null_orders_amount"], + "unique_id": "test.jaffle_shop.not_null_orders_amount.f7bae8de1b", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/not_null_orders_amount.sql", + "original_file_path": "models/schema.yml", + "name": "not_null_orders_amount", + "alias": "not_null_orders_amount", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["orders"] + ], + "sources": [], + "description": "", + "columns": {}, + 
"meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "amount" + }, + "test.jaffle_shop.not_null_orders_credit_card_amount.f6f7978042": { + "raw_sql": "{{ test_not_null(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_not_null"], + "nodes": ["model.jaffle_shop.orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "not_null_orders_credit_card_amount"], + "unique_id": "test.jaffle_shop.not_null_orders_credit_card_amount.f6f7978042", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/not_null_orders_credit_card_amount.sql", + "original_file_path": "models/schema.yml", + "name": "not_null_orders_credit_card_amount", + "alias": "not_null_orders_credit_card_amount", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["orders"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "credit_card_amount" + }, + "test.jaffle_shop.not_null_orders_coupon_amount.edd08a4b47": { + "raw_sql": "{{ test_not_null(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_not_null"], + "nodes": ["model.jaffle_shop.orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "not_null_orders_coupon_amount"], + "unique_id": "test.jaffle_shop.not_null_orders_coupon_amount.edd08a4b47", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/not_null_orders_coupon_amount.sql", + "original_file_path": "models/schema.yml", + "name": "not_null_orders_coupon_amount", + "alias": "not_null_orders_coupon_amount", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["orders"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "coupon_amount" + }, + "test.jaffle_shop.not_null_orders_bank_transfer_amount.402a8a1daa": { + "raw_sql": "{{ test_not_null(**_dbt_schema_test_kwargs) }}", + "resource_type": 
"test", + "depends_on": { + "macros": ["macro.dbt.test_not_null"], + "nodes": ["model.jaffle_shop.orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "not_null_orders_bank_transfer_amount"], + "unique_id": "test.jaffle_shop.not_null_orders_bank_transfer_amount.402a8a1daa", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/not_null_orders_bank_transfer_amount.sql", + "original_file_path": "models/schema.yml", + "name": "not_null_orders_bank_transfer_amount", + "alias": "not_null_orders_bank_transfer_amount", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["orders"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "bank_transfer_amount" + }, + "test.jaffle_shop.not_null_orders_gift_card_amount.6205906a88": { + "raw_sql": "{{ test_not_null(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_not_null"], + "nodes": ["model.jaffle_shop.orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "not_null_orders_gift_card_amount"], + "unique_id": "test.jaffle_shop.not_null_orders_gift_card_amount.6205906a88", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/not_null_orders_gift_card_amount.sql", + "original_file_path": "models/schema.yml", + "name": "not_null_orders_gift_card_amount", + "alias": "not_null_orders_gift_card_amount", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["orders"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "gift_card_amount" + }, + "test.jaffle_shop.unique_stg_customers_customer_id.5530022331": { + "raw_sql": "{{ test_unique(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_unique"], + "nodes": ["model.jaffle_shop.stg_customers"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + 
"full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "unique_stg_customers_customer_id"], + "unique_id": "test.jaffle_shop.unique_stg_customers_customer_id.5530022331", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/unique_stg_customers_customer_id.sql", + "original_file_path": "models/staging/schema.yml", + "name": "unique_stg_customers_customer_id", + "alias": "unique_stg_customers_customer_id", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["stg_customers"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "customer_id" + }, + "test.jaffle_shop.not_null_stg_customers_customer_id.4ab9034fe1": { + "raw_sql": "{{ test_not_null(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_not_null"], + "nodes": ["model.jaffle_shop.stg_customers"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "not_null_stg_customers_customer_id"], + "unique_id": "test.jaffle_shop.not_null_stg_customers_customer_id.4ab9034fe1", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/not_null_stg_customers_customer_id.sql", + "original_file_path": "models/staging/schema.yml", + "name": "not_null_stg_customers_customer_id", + "alias": "not_null_stg_customers_customer_id", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["stg_customers"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "customer_id" + }, + "test.jaffle_shop.unique_stg_orders_order_id.99e62d7d48": { + "raw_sql": "{{ test_unique(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_unique"], + "nodes": ["model.jaffle_shop.stg_orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", 
"unique_stg_orders_order_id"], + "unique_id": "test.jaffle_shop.unique_stg_orders_order_id.99e62d7d48", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/unique_stg_orders_order_id.sql", + "original_file_path": "models/staging/schema.yml", + "name": "unique_stg_orders_order_id", + "alias": "unique_stg_orders_order_id", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["stg_orders"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "order_id" + }, + "test.jaffle_shop.not_null_stg_orders_order_id.052f14ae90": { + "raw_sql": "{{ test_not_null(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_not_null"], + "nodes": ["model.jaffle_shop.stg_orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "not_null_stg_orders_order_id"], + "unique_id": "test.jaffle_shop.not_null_stg_orders_order_id.052f14ae90", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/not_null_stg_orders_order_id.sql", + "original_file_path": "models/staging/schema.yml", + "name": "not_null_stg_orders_order_id", + "alias": "not_null_stg_orders_order_id", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["stg_orders"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "order_id" + }, + "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.1b7358ad3f": { + "raw_sql": "{{ test_accepted_values(**_dbt_schema_test_kwargs) }}{{ config(alias=\"accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58\") }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_accepted_values", "macro.dbt.default__test_accepted_values"], + "nodes": ["model.jaffle_shop.stg_orders"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58", + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned"], + "unique_id": 
"test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.1b7358ad3f", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58.sql", + "original_file_path": "models/staging/schema.yml", + "name": "accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned", + "alias": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["stg_orders"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": { + "alias": "accepted_values_stg_orders_4f514bf94b77b7ea437830eec4421c58" + }, + "created_at": 1625485389, + "column_name": "status" + }, + "test.jaffle_shop.unique_stg_payments_payment_id.5f5522e7d6": { + "raw_sql": "{{ test_unique(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_unique"], + "nodes": ["model.jaffle_shop.stg_payments"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "unique_stg_payments_payment_id"], + "unique_id": "test.jaffle_shop.unique_stg_payments_payment_id.5f5522e7d6", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/unique_stg_payments_payment_id.sql", + "original_file_path": "models/staging/schema.yml", + "name": "unique_stg_payments_payment_id", + "alias": "unique_stg_payments_payment_id", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["stg_payments"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "payment_id" + }, + "test.jaffle_shop.not_null_stg_payments_payment_id.ece096e012": { + "raw_sql": "{{ test_not_null(**_dbt_schema_test_kwargs) }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_not_null"], + "nodes": ["model.jaffle_shop.stg_payments"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": null, + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "not_null_stg_payments_payment_id"], + "unique_id": "test.jaffle_shop.not_null_stg_payments_payment_id.ece096e012", + "package_name": "jaffle_shop", + "root_path": 
"/home/example/code/jaffle-shop", + "path": "schema_test/not_null_stg_payments_payment_id.sql", + "original_file_path": "models/staging/schema.yml", + "name": "not_null_stg_payments_payment_id", + "alias": "not_null_stg_payments_payment_id", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["stg_payments"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": {}, + "created_at": 1625485389, + "column_name": "payment_id" + }, + "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.59d3da1081": { + "raw_sql": "{{ test_accepted_values(**_dbt_schema_test_kwargs) }}{{ config(alias=\"accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef\") }}", + "resource_type": "test", + "depends_on": { + "macros": ["macro.dbt.test_accepted_values", "macro.dbt.default__test_accepted_values"], + "nodes": ["model.jaffle_shop.stg_payments"] + }, + "config": { + "enabled": true, + "materialized": "test", + "persist_docs": {}, + "vars": {}, + "quoting": {}, + "column_types": {}, + "alias": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef", + "schema": "dbt_test__audit", + "database": null, + "tags": [], + "full_refresh": null, + "severity": "ERROR", + "store_failures": null, + "where": null, + "limit": null, + "fail_calc": "count(*)", + "warn_if": "!= 0", + "error_if": "!= 0", + "post-hook": [], + "pre-hook": [] + }, + "database": "DEMO_DB", + "schema": "public_dbt_test__audit", + "fqn": ["jaffle_shop", "schema_test", "accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card"], + "unique_id": "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.59d3da1081", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "schema_test/accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef.sql", + "original_file_path": "models/staging/schema.yml", + "name": "accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card", + "alias": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef", + "checksum": { + "name": "none", + "checksum": "" + }, + "tags": ["schema"], + "refs": [ + ["stg_payments"] + ], + "sources": [], + "description": "", + "columns": {}, + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "compiled_path": null, + "build_path": null, + "deferred": false, + "unrendered_config": { + "alias": "accepted_values_stg_payments_c7909fb19b1f0177c2bf99c7912f06ef" + }, + "created_at": 1625485389, + "column_name": "payment_method" + } + }, + "sources": {}, + "macros": { + "macro.dbt_snowflake.snowflake__get_catalog": { + "unique_id": "macro.dbt_snowflake.snowflake__get_catalog", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/catalog.sql", + "original_file_path": "macros/catalog.sql", + "name": "snowflake__get_catalog", + "macro_sql": "{% macro snowflake__get_catalog(information_schema, schemas) -%}\n {% set query %}\n with tables as (\n\n select\n table_catalog as \"table_database\",\n table_schema as \"table_schema\",\n table_name as \"table_name\",\n table_type as \"table_type\",\n comment as \"table_comment\",\n\n -- note: this is the _role_ 
that owns the table\n table_owner as \"table_owner\",\n\n 'Clustering Key' as \"stats:clustering_key:label\",\n clustering_key as \"stats:clustering_key:value\",\n 'The key used to cluster this table' as \"stats:clustering_key:description\",\n (clustering_key is not null) as \"stats:clustering_key:include\",\n\n 'Row Count' as \"stats:row_count:label\",\n row_count as \"stats:row_count:value\",\n 'An approximate count of rows in this table' as \"stats:row_count:description\",\n (row_count is not null) as \"stats:row_count:include\",\n\n 'Approximate Size' as \"stats:bytes:label\",\n bytes as \"stats:bytes:value\",\n 'Approximate size of the table as reported by Snowflake' as \"stats:bytes:description\",\n (bytes is not null) as \"stats:bytes:include\",\n\n 'Last Modified' as \"stats:last_modified:label\",\n to_varchar(convert_timezone('UTC', last_altered), 'yyyy-mm-dd HH24:MI'||'UTC') as \"stats:last_modified:value\",\n 'The timestamp for last update/change' as \"stats:last_modified:description\",\n (last_altered is not null and table_type='BASE TABLE') as \"stats:last_modified:include\"\n\n from {{ information_schema }}.tables\n\n ),\n\n columns as (\n\n select\n table_catalog as \"table_database\",\n table_schema as \"table_schema\",\n table_name as \"table_name\",\n\n column_name as \"column_name\",\n ordinal_position as \"column_index\",\n data_type as \"column_type\",\n comment as \"column_comment\"\n\n from {{ information_schema }}.columns\n )\n\n select *\n from tables\n join columns using (\"table_database\", \"table_schema\", \"table_name\")\n where (\n {%- for schema in schemas -%}\n upper(\"table_schema\") = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n order by \"column_index\"\n {%- endset -%}\n\n {{ return(run_query(query)) }}\n\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.run_query"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__create_table_as": { + "unique_id": "macro.dbt_snowflake.snowflake__create_table_as", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__create_table_as", + "macro_sql": "{% macro snowflake__create_table_as(temporary, relation, sql) -%}\n {%- set transient = config.get('transient', default=true) -%}\n {%- set cluster_by_keys = config.get('cluster_by', default=none) -%}\n {%- set enable_automatic_clustering = config.get('automatic_clustering', default=false) -%}\n {%- set copy_grants = config.get('copy_grants', default=false) -%}\n\n {%- if cluster_by_keys is not none and cluster_by_keys is string -%}\n {%- set cluster_by_keys = [cluster_by_keys] -%}\n {%- endif -%}\n {%- if cluster_by_keys is not none -%}\n {%- set cluster_by_string = cluster_by_keys|join(\", \")-%}\n {% else %}\n {%- set cluster_by_string = none -%}\n {%- endif -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create or replace {% if temporary -%}\n temporary\n {%- elif transient -%}\n transient\n {%- endif %} table {{ relation }} {% if copy_grants and not temporary -%} copy grants {%- endif %} as\n (\n {%- if cluster_by_string is not none -%}\n select * from(\n {{ sql }}\n ) order by ({{ cluster_by_string 
}})\n {%- else -%}\n {{ sql }}\n {%- endif %}\n );\n {% if cluster_by_string is not none and not temporary -%}\n alter table {{relation}} cluster by ({{cluster_by_string}});\n {%- endif -%}\n {% if enable_automatic_clustering and cluster_by_string is not none and not temporary -%}\n alter table {{relation}} resume recluster;\n {%- endif -%}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__create_view_as": { + "unique_id": "macro.dbt_snowflake.snowflake__create_view_as", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__create_view_as", + "macro_sql": "{% macro snowflake__create_view_as(relation, sql) -%}\n {%- set secure = config.get('secure', default=false) -%}\n {%- set copy_grants = config.get('copy_grants', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create or replace {% if secure -%}\n secure\n {%- endif %} view {{ relation }} {% if copy_grants -%} copy grants {%- endif %} as (\n {{ sql }}\n );\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__get_columns_in_relation": { + "unique_id": "macro.dbt_snowflake.snowflake__get_columns_in_relation", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__get_columns_in_relation", + "macro_sql": "{% macro snowflake__get_columns_in_relation(relation) -%}\n {%- set sql -%}\n describe table {{ relation }}\n {%- endset -%}\n {%- set result = run_query(sql) -%}\n\n {% set maximum = 10000 %}\n {% if (result | length) >= maximum %}\n {% set msg %}\n Too many columns in relation {{ relation }}! 
dbt can only get\n information about relations with fewer than {{ maximum }} columns.\n {% endset %}\n {% do exceptions.raise_compiler_error(msg) %}\n {% endif %}\n\n {% set columns = [] %}\n {% for row in result %}\n {% do columns.append(api.Column.from_description(row['name'], row['type'])) %}\n {% endfor %}\n {% do return(columns) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.run_query"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__list_schemas": { + "unique_id": "macro.dbt_snowflake.snowflake__list_schemas", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__list_schemas", + "macro_sql": "{% macro snowflake__list_schemas(database) -%}\n {# 10k limit from here: https://docs.snowflake.net/manuals/sql-reference/sql/show-schemas.html#usage-notes #}\n {% set maximum = 10000 %}\n {% set sql -%}\n show terse schemas in database {{ database }}\n limit {{ maximum }}\n {%- endset %}\n {% set result = run_query(sql) %}\n {% if (result | length) >= maximum %}\n {% set msg %}\n Too many schemas in database {{ database }}! dbt can only get\n information about databases with fewer than {{ maximum }} schemas.\n {% endset %}\n {% do exceptions.raise_compiler_error(msg) %}\n {% endif %}\n {{ return(result) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.run_query"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__list_relations_without_caching": { + "unique_id": "macro.dbt_snowflake.snowflake__list_relations_without_caching", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__list_relations_without_caching", + "macro_sql": "{% macro snowflake__list_relations_without_caching(schema_relation) %}\n {%- set sql -%}\n show terse objects in {{ schema_relation }}\n {%- endset -%}\n\n {%- set result = run_query(sql) -%}\n {% set maximum = 10000 %}\n {% if (result | length) >= maximum %}\n {% set msg %}\n Too many schemas in schema {{ schema_relation }}! 
dbt can only get\n information about schemas with fewer than {{ maximum }} objects.\n {% endset %}\n {% do exceptions.raise_compiler_error(msg) %}\n {% endif %}\n {%- do return(result) -%}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.run_query"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__check_schema_exists": { + "unique_id": "macro.dbt_snowflake.snowflake__check_schema_exists", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__check_schema_exists", + "macro_sql": "{% macro snowflake__check_schema_exists(information_schema, schema) -%}\n {% call statement('check_schema_exists', fetch_result=True) -%}\n select count(*)\n from {{ information_schema }}.schemata\n where upper(schema_name) = upper('{{ schema }}')\n and upper(catalog_name) = upper('{{ information_schema.database }}')\n {%- endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__current_timestamp": { + "unique_id": "macro.dbt_snowflake.snowflake__current_timestamp", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__current_timestamp", + "macro_sql": "{% macro snowflake__current_timestamp() -%}\n convert_timezone('UTC', current_timestamp())\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__snapshot_string_as_time": { + "unique_id": "macro.dbt_snowflake.snowflake__snapshot_string_as_time", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__snapshot_string_as_time", + "macro_sql": "{% macro snowflake__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"to_timestamp_ntz('\" ~ timestamp ~ \"')\" -%}\n {{ return(result) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__snapshot_get_time": { + "unique_id": "macro.dbt_snowflake.snowflake__snapshot_get_time", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__snapshot_get_time", + "macro_sql": "{% macro snowflake__snapshot_get_time() -%}\n to_timestamp_ntz({{ current_timestamp() 
}})\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.current_timestamp"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__rename_relation": { + "unique_id": "macro.dbt_snowflake.snowflake__rename_relation", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__rename_relation", + "macro_sql": "{% macro snowflake__rename_relation(from_relation, to_relation) -%}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ to_relation }}\n {%- endcall %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__alter_column_type": { + "unique_id": "macro.dbt_snowflake.snowflake__alter_column_type", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__alter_column_type", + "macro_sql": "{% macro snowflake__alter_column_type(relation, column_name, new_column_type) -%}\n {% call statement('alter_column_type') %}\n alter table {{ relation }} alter {{ adapter.quote(column_name) }} set data type {{ new_column_type }};\n {% endcall %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__alter_relation_comment": { + "unique_id": "macro.dbt_snowflake.snowflake__alter_relation_comment", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__alter_relation_comment", + "macro_sql": "{% macro snowflake__alter_relation_comment(relation, relation_comment) -%}\n comment on {{ relation.type }} {{ relation }} IS $${{ relation_comment | replace('$', '[$]') }}$$;\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__alter_column_comment": { + "unique_id": "macro.dbt_snowflake.snowflake__alter_column_comment", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "snowflake__alter_column_comment", + "macro_sql": "{% macro snowflake__alter_column_comment(relation, column_dict) -%}\n {% for column_name in column_dict %}\n comment if exists on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is $${{ 
column_dict[column_name]['description'] | replace('$', '[$]') }}$$;\n {% endfor %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.get_current_query_tag": { + "unique_id": "macro.dbt_snowflake.get_current_query_tag", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "get_current_query_tag", + "macro_sql": "{% macro get_current_query_tag() -%}\n {{ return(run_query(\"show parameters like 'query_tag' in session\").rows[0]['value']) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.run_query"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.set_query_tag": { + "unique_id": "macro.dbt_snowflake.set_query_tag", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "set_query_tag", + "macro_sql": "{% macro set_query_tag() -%}\n {% set new_query_tag = config.get('query_tag') %}\n {% if new_query_tag %}\n {% set original_query_tag = get_current_query_tag() %}\n {{ log(\"Setting query_tag to '\" ~ new_query_tag ~ \"'. Will reset to '\" ~ original_query_tag ~ \"' after materialization.\") }}\n {% do run_query(\"alter session set query_tag = '{}'\".format(new_query_tag)) %}\n {{ return(original_query_tag)}}\n {% endif %}\n {{ return(none)}}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.get_current_query_tag", "macro.dbt.run_query"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.unset_query_tag": { + "unique_id": "macro.dbt_snowflake.unset_query_tag", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/adapters.sql", + "original_file_path": "macros/adapters.sql", + "name": "unset_query_tag", + "macro_sql": "{% macro unset_query_tag(original_query_tag) -%}\n {% set new_query_tag = config.get('query_tag') %}\n {% if new_query_tag %}\n {% if original_query_tag %}\n {{ log(\"Resetting query_tag to '\" ~ original_query_tag ~ \"'.\") }}\n {% do run_query(\"alter session set query_tag = '{}'\".format(original_query_tag)) %}\n {% else %}\n {{ log(\"No original query_tag, unsetting parameter.\") }}\n {% do run_query(\"alter session unset query_tag\") %}\n {% endif %}\n {% endif %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.run_query"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.dbt_snowflake_validate_get_incremental_strategy": { + "unique_id": "macro.dbt_snowflake.dbt_snowflake_validate_get_incremental_strategy", + "package_name": "dbt_snowflake", + "root_path": 
"/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/materializations/incremental.sql", + "original_file_path": "macros/materializations/incremental.sql", + "name": "dbt_snowflake_validate_get_incremental_strategy", + "macro_sql": "{% macro dbt_snowflake_validate_get_incremental_strategy(config) %}\n {#-- Find and validate the incremental strategy #}\n {%- set strategy = config.get(\"incremental_strategy\", default=\"merge\") -%}\n\n {% set invalid_strategy_msg -%}\n Invalid incremental strategy provided: {{ strategy }}\n Expected one of: 'merge', 'delete+insert'\n {%- endset %}\n {% if strategy not in ['merge', 'delete+insert'] %}\n {% do exceptions.raise_compiler_error(invalid_strategy_msg) %}\n {% endif %}\n\n {% do return(strategy) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.dbt_snowflake_get_incremental_sql": { + "unique_id": "macro.dbt_snowflake.dbt_snowflake_get_incremental_sql", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/materializations/incremental.sql", + "original_file_path": "macros/materializations/incremental.sql", + "name": "dbt_snowflake_get_incremental_sql", + "macro_sql": "{% macro dbt_snowflake_get_incremental_sql(strategy, tmp_relation, target_relation, unique_key, dest_columns) %}\n {% if strategy == 'merge' %}\n {% do return(get_merge_sql(target_relation, tmp_relation, unique_key, dest_columns)) %}\n {% elif strategy == 'delete+insert' %}\n {% do return(get_delete_insert_merge_sql(target_relation, tmp_relation, unique_key, dest_columns)) %}\n {% else %}\n {% do exceptions.raise_compiler_error('invalid strategy: ' ~ strategy) %}\n {% endif %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.get_merge_sql", "macro.dbt.get_delete_insert_merge_sql"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.materialization_incremental_snowflake": { + "unique_id": "macro.dbt_snowflake.materialization_incremental_snowflake", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/materializations/incremental.sql", + "original_file_path": "macros/materializations/incremental.sql", + "name": "materialization_incremental_snowflake", + "macro_sql": "{% materialization incremental, adapter='snowflake' -%}\n\n {% set original_query_tag = set_query_tag() %}\n\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {% set target_relation = this %}\n {% set existing_relation = load_relation(this) %}\n {% set tmp_relation = make_temp_relation(this) %}\n\n {#-- Validate early so we don't run SQL if the strategy is invalid --#}\n {% set strategy = dbt_snowflake_validate_get_incremental_strategy(config) -%}\n\n -- setup\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n {% elif 
existing_relation.is_view %}\n {#-- Can't overwrite a view with a table - we must drop --#}\n {{ log(\"Dropping relation \" ~ target_relation ~ \" because it is a view and this model is a table.\") }}\n {% do adapter.drop_relation(existing_relation) %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n {% else %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {% set dest_columns = adapter.get_columns_in_relation(target_relation) %}\n {% set build_sql = dbt_snowflake_get_incremental_sql(strategy, tmp_relation, target_relation, unique_key, dest_columns) %}\n {% endif %}\n\n {%- call statement('main') -%}\n {{ build_sql }}\n {%- endcall -%}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {% set target_relation = target_relation.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.should_full_refresh", "macro.dbt.load_relation", "macro.dbt.make_temp_relation", "macro.dbt_snowflake.dbt_snowflake_validate_get_incremental_strategy", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.run_query", "macro.dbt_snowflake.dbt_snowflake_get_incremental_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt_snowflake.unset_query_tag"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.materialization_table_snowflake": { + "unique_id": "macro.dbt_snowflake.materialization_table_snowflake", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/materializations/table.sql", + "original_file_path": "macros/materializations/table.sql", + "name": "materialization_table_snowflake", + "macro_sql": "{% materialization table, adapter='snowflake' %}\n\n {% set original_query_tag = set_query_tag() %}\n\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database, type='table') -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {#-- Drop the relation if it was a view to \"convert\" it in a table. 
This may lead to\n -- downtime, but it should be a relatively infrequent occurrence #}\n {% if old_relation is not none and not old_relation.is_table %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ drop_relation_if_exists(old_relation) }}\n {% endif %}\n\n --build model\n {% call statement('main') -%}\n {{ create_table_as(false, target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.persist_docs", "macro.dbt_snowflake.unset_query_tag"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.materialization_view_snowflake": { + "unique_id": "macro.dbt_snowflake.materialization_view_snowflake", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/materializations/view.sql", + "original_file_path": "macros/materializations/view.sql", + "name": "materialization_view_snowflake", + "macro_sql": "{% materialization view, adapter='snowflake' -%}\n\n {% set original_query_tag = set_query_tag() %}\n {% set to_return = create_or_replace_view() %}\n\n {% set target_relation = this.incorporate(type='view') %}\n {% do persist_docs(target_relation, model, for_columns=false) %}\n\n {% do return(to_return) %}\n\n {% do unset_query_tag(original_query_tag) %}\n\n{%- endmaterialization %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.set_query_tag", "macro.dbt.create_or_replace_view", "macro.dbt.persist_docs", "macro.dbt_snowflake.unset_query_tag"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt_snowflake.snowflake__get_merge_sql": { + "unique_id": "macro.dbt_snowflake.snowflake__get_merge_sql", + "package_name": "dbt_snowflake", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/snowflake", + "path": "macros/materializations/merge.sql", + "original_file_path": "macros/materializations/merge.sql", + "name": "snowflake__get_merge_sql", + "macro_sql": "{% macro snowflake__get_merge_sql(target, source_sql, unique_key, dest_columns, predicates) -%}\n\n {#\n Workaround for Snowflake not being happy with a merge on a constant-false predicate.\n When no unique_key is provided, this macro will do a regular insert. 
If a unique_key\n is provided, then this macro will do a proper merge instead.\n #}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute='name')) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {%- if unique_key is none -%}\n\n {{ sql_header if sql_header is not none }}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source_sql }}\n );\n\n {%- else -%}\n\n {{ default__get_merge_sql(target, source_sql, unique_key, dest_columns, predicates) }}\n\n {%- endif -%}\n\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.get_quoted_csv", "macro.dbt.default__get_merge_sql"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.statement": { + "unique_id": "macro.dbt.statement", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/core.sql", + "original_file_path": "macros/core.sql", + "name": "statement", + "macro_sql": "{% macro statement(name=None, fetch_result=False, auto_begin=True) -%}\n {%- if execute: -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- set res, table = adapter.execute(sql, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.noop_statement": { + "unique_id": "macro.dbt.noop_statement", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/core.sql", + "original_file_path": "macros/core.sql", + "name": "noop_statement", + "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.get_test_sql": { + "unique_id": "macro.dbt.get_test_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/test.sql", + "original_file_path": "macros/materializations/test.sql", + "name": "get_test_sql", + "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__get_test_sql"] + }, + "description": "", + 
"meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__get_test_sql": { + "unique_id": "macro.dbt.default__get_test_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/test.sql", + "original_file_path": "macros/materializations/test.sql", + "name": "default__get_test_sql", + "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.materialization_test_default": { + "unique_id": "macro.dbt.materialization_test_default", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/test.sql", + "original_file_path": "macros/materializations/test.sql", + "name": "materialization_test_default", + "macro_sql": "\n\n{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n \n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n \n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n \n {% do relations.append(target_relation) %}\n \n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n \n {{ adapter.commit() }}\n \n {% else %}\n\n {% set main_sql = sql %}\n \n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n \n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.run_hooks": { + "unique_id": "macro.dbt.run_hooks", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/helpers.sql", + "original_file_path": "macros/materializations/helpers.sql", + "name": "run_hooks", + "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call 
statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.column_list": { + "unique_id": "macro.dbt.column_list", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/helpers.sql", + "original_file_path": "macros/materializations/helpers.sql", + "name": "column_list", + "macro_sql": "{% macro column_list(columns) %}\n {%- for col in columns %}\n {{ col.name }} {% if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.column_list_for_create_table": { + "unique_id": "macro.dbt.column_list_for_create_table", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/helpers.sql", + "original_file_path": "macros/materializations/helpers.sql", + "name": "column_list_for_create_table", + "macro_sql": "{% macro column_list_for_create_table(columns) %}\n {%- for col in columns %}\n {{ col.name }} {{ col.data_type }} {%- if not loop.last %},{% endif %}\n {% endfor -%}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.make_hook_config": { + "unique_id": "macro.dbt.make_hook_config", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/helpers.sql", + "original_file_path": "macros/materializations/helpers.sql", + "name": "make_hook_config", + "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.before_begin": { + "unique_id": "macro.dbt.before_begin", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/helpers.sql", + "original_file_path": "macros/materializations/helpers.sql", + "name": "before_begin", + "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.make_hook_config"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + 
"macro.dbt.in_transaction": { + "unique_id": "macro.dbt.in_transaction", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/helpers.sql", + "original_file_path": "macros/materializations/helpers.sql", + "name": "in_transaction", + "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.make_hook_config"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.after_commit": { + "unique_id": "macro.dbt.after_commit", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/helpers.sql", + "original_file_path": "macros/materializations/helpers.sql", + "name": "after_commit", + "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.make_hook_config"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.drop_relation_if_exists": { + "unique_id": "macro.dbt.drop_relation_if_exists", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/helpers.sql", + "original_file_path": "macros/materializations/helpers.sql", + "name": "drop_relation_if_exists", + "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.load_relation": { + "unique_id": "macro.dbt.load_relation", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/helpers.sql", + "original_file_path": "macros/materializations/helpers.sql", + "name": "load_relation", + "macro_sql": "{% macro load_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.should_full_refresh": { + "unique_id": "macro.dbt.should_full_refresh", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/helpers.sql", + "original_file_path": "macros/materializations/helpers.sql", + "name": "should_full_refresh", + "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n 
{% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.should_store_failures": { + "unique_id": "macro.dbt.should_store_failures", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/helpers.sql", + "original_file_path": "macros/materializations/helpers.sql", + "name": "should_store_failures", + "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.snapshot_merge_sql": { + "unique_id": "macro.dbt.snapshot_merge_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/snapshot_merge.sql", + "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", + "name": "snapshot_merge_sql", + "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql')(target, source, insert_cols) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__snapshot_merge_sql"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__snapshot_merge_sql": { + "unique_id": "macro.dbt.default__snapshot_merge_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/snapshot_merge.sql", + "original_file_path": "macros/materializations/snapshot/snapshot_merge.sql", + "name": "default__snapshot_merge_sql", + "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n ;\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.create_columns": { + "unique_id": "macro.dbt.create_columns", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": 
"macros/materializations/snapshot/snapshot.sql", + "original_file_path": "macros/materializations/snapshot/snapshot.sql", + "name": "create_columns", + "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns')(relation, columns) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__create_columns"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__create_columns": { + "unique_id": "macro.dbt.default__create_columns", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/snapshot.sql", + "original_file_path": "macros/materializations/snapshot/snapshot.sql", + "name": "default__create_columns", + "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.post_snapshot": { + "unique_id": "macro.dbt.post_snapshot", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/snapshot.sql", + "original_file_path": "macros/materializations/snapshot/snapshot.sql", + "name": "post_snapshot", + "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot')(staging_relation) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__post_snapshot"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__post_snapshot": { + "unique_id": "macro.dbt.default__post_snapshot", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/snapshot.sql", + "original_file_path": "macros/materializations/snapshot/snapshot.sql", + "name": "default__post_snapshot", + "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.snapshot_staging_table": { + "unique_id": "macro.dbt.snapshot_staging_table", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/snapshot.sql", + "original_file_path": "macros/materializations/snapshot/snapshot.sql", + "name": "snapshot_staging_table", + "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as 
dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select \n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n \n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n \n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.snapshot_get_time"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.build_snapshot_table": { + "unique_id": "macro.dbt.build_snapshot_table", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/snapshot.sql", + "original_file_path": "macros/materializations/snapshot/snapshot.sql", + "name": "build_snapshot_table", + "macro_sql": "{% macro build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.get_or_create_relation": { + "unique_id": "macro.dbt.get_or_create_relation", + "package_name": "dbt", + "root_path": 
"/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/snapshot.sql", + "original_file_path": "macros/materializations/snapshot/snapshot.sql", + "name": "get_or_create_relation", + "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.build_snapshot_staging_table": { + "unique_id": "macro.dbt.build_snapshot_staging_table", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/snapshot.sql", + "original_file_path": "macros/materializations/snapshot/snapshot.sql", + "name": "build_snapshot_staging_table", + "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, tmp_relation, select) }}\n {% endcall %}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.materialization_snapshot_default": { + "unique_id": "macro.dbt.materialization_snapshot_default", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/snapshot.sql", + "original_file_path": "macros/materializations/snapshot/snapshot.sql", + "name": "materialization_snapshot_default", + "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n\n {% if not adapter.check_schema_exists(model.database, model.schema) %}\n {% do create_schema(model.database, model.schema) %}\n {% endif %}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, 
target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_sql']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.create_schema", "macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.strategy_dispatch": { + "unique_id": "macro.dbt.strategy_dispatch", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/strategies.sql", + "original_file_path": "macros/materializations/snapshot/strategies.sql", + "name": "strategy_dispatch", + "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' 
in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.snapshot_hash_arguments": { + "unique_id": "macro.dbt.snapshot_hash_arguments", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/strategies.sql", + "original_file_path": "macros/materializations/snapshot/strategies.sql", + "name": "snapshot_hash_arguments", + "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments')(args) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__snapshot_hash_arguments"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__snapshot_hash_arguments": { + "unique_id": "macro.dbt.default__snapshot_hash_arguments", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/strategies.sql", + "original_file_path": "macros/materializations/snapshot/strategies.sql", + "name": "default__snapshot_hash_arguments", + "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.snapshot_get_time": { + "unique_id": "macro.dbt.snapshot_get_time", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/strategies.sql", + "original_file_path": "macros/materializations/snapshot/strategies.sql", + "name": "snapshot_get_time", + "macro_sql": "{% macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time')() }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__snapshot_get_time"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + 
"macro.dbt.default__snapshot_get_time": { + "unique_id": "macro.dbt.default__snapshot_get_time", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/strategies.sql", + "original_file_path": "macros/materializations/snapshot/strategies.sql", + "name": "default__snapshot_get_time", + "macro_sql": "{% macro default__snapshot_get_time() -%}\n {{ current_timestamp() }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.current_timestamp"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.snapshot_timestamp_strategy": { + "unique_id": "macro.dbt.snapshot_timestamp_strategy", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/strategies.sql", + "original_file_path": "macros/materializations/snapshot/strategies.sql", + "name": "snapshot_timestamp_strategy", + "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/fishtown-analytics/dbt/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.snapshot_hash_arguments"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.snapshot_string_as_time": { + "unique_id": "macro.dbt.snapshot_string_as_time", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/strategies.sql", + "original_file_path": "macros/materializations/snapshot/strategies.sql", + "name": "snapshot_string_as_time", + "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time')(timestamp) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__snapshot_string_as_time"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__snapshot_string_as_time": { + "unique_id": "macro.dbt.default__snapshot_string_as_time", + "package_name": "dbt", + "root_path": 
"/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/strategies.sql", + "original_file_path": "macros/materializations/snapshot/strategies.sql", + "name": "default__snapshot_string_as_time", + "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.snapshot_check_all_get_existing_columns": { + "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/strategies.sql", + "original_file_path": "macros/materializations/snapshot/strategies.sql", + "name": "snapshot_check_all_get_existing_columns", + "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists) -%}\n {%- set query_columns = get_columns_in_query(node['compiled_sql']) -%}\n {%- if not target_exists -%}\n {# no table yet -> return whatever the query does #}\n {{ return([false, query_columns]) }}\n {%- endif -%}\n {# handle any schema changes #}\n {%- set target_table = node.get('alias', node.get('name')) -%}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=target_table) -%}\n {%- set existing_cols = get_columns_in_query('select * from ' ~ target_relation) -%}\n {%- set ns = namespace() -%} {# handle for-loop scoping with a namespace #}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(col) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return([ns.column_added, intersection]) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.get_columns_in_query"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.snapshot_check_strategy": { + "unique_id": "macro.dbt.snapshot_check_strategy", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/snapshot/strategies.sql", + "original_file_path": "macros/materializations/snapshot/strategies.sql", + "name": "snapshot_check_strategy", + "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n \n {% set select_current_time -%}\n select {{ snapshot_get_time() }} as snapshot_start\n {%- endset %}\n\n {#-- don't access the column by name, to avoid dealing with casing issues on snowflake #}\n {%- set now = run_query(select_current_time)[0][0] -%}\n {% if now is none or now is undefined -%}\n {%- do exceptions.raise_compiler_error('Could not get a snapshot start time from the database') -%}\n {%- endif 
%}\n {% set updated_at = config.get('updated_at', snapshot_string_as_time(now)) %}\n\n {% set column_added = false %}\n\n {% if check_cols_config == 'all' %}\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists) %}\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {% set check_cols = check_cols_config %}\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n TRUE\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.snapshot_get_time", "macro.dbt.run_query", "macro.dbt.snapshot_string_as_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.snapshot_hash_arguments"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.materialization_incremental_default": { + "unique_id": "macro.dbt.materialization_incremental_default", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/incremental/incremental.sql", + "original_file_path": "macros/materializations/incremental/incremental.sql", + "name": "materialization_incremental_default", + "macro_sql": "{% materialization incremental, default -%}\n\n {% set unique_key = config.get('unique_key') %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% set existing_relation = load_relation(this) %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n {% if existing_relation is none %}\n {% set build_sql = create_table_as(False, target_relation, sql) %}\n {% elif existing_relation.is_view or should_full_refresh() %}\n {#-- Make sure the backup doesn't exist so we don't encounter issues with the rename below #}\n {% set tmp_identifier = model['name'] + '__dbt_tmp' %}\n {% set backup_identifier = model['name'] + \"__dbt_backup\" %}\n\n {% set intermediate_relation = existing_relation.incorporate(path={\"identifier\": tmp_identifier}) %}\n {% set backup_relation = existing_relation.incorporate(path={\"identifier\": backup_identifier}) %}\n\n {% do adapter.drop_relation(intermediate_relation) %}\n {% do adapter.drop_relation(backup_relation) %}\n\n {% set build_sql = create_table_as(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% do to_drop.append(backup_relation) %}\n {% else %}\n {% set tmp_relation = make_temp_relation(target_relation) %}\n {% do run_query(create_table_as(True, tmp_relation, sql)) %}\n {% do 
adapter.expand_target_column_types(\n from_relation=tmp_relation,\n to_relation=target_relation) %}\n {% set build_sql = incremental_upsert(tmp_relation, target_relation, unique_key=unique_key) %}\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %} \n {% do adapter.rename_relation(target_relation, backup_relation) %} \n {% do adapter.rename_relation(intermediate_relation, target_relation) %} \n {% endif %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.load_relation", "macro.dbt.run_hooks", "macro.dbt.create_table_as", "macro.dbt.should_full_refresh", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.incremental_upsert", "macro.dbt.statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.incremental_upsert": { + "unique_id": "macro.dbt.incremental_upsert", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/incremental/helpers.sql", + "original_file_path": "macros/materializations/incremental/helpers.sql", + "name": "incremental_upsert", + "macro_sql": "{% macro incremental_upsert(tmp_relation, target_relation, unique_key=none, statement_name=\"main\") %}\n {%- set dest_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set dest_cols_csv = dest_columns | map(attribute='quoted') | join(', ') -%}\n\n {%- if unique_key is not none -%}\n delete\n from {{ target_relation }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ tmp_relation }}\n );\n {%- endif %}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ tmp_relation }}\n );\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.create_csv_table": { + "unique_id": "macro.dbt.create_csv_table", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/seed/seed.sql", + "original_file_path": "macros/materializations/seed/seed.sql", + "name": "create_csv_table", + "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table')(model, agate_table) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__create_csv_table"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.reset_csv_table": { + "unique_id": 
"macro.dbt.reset_csv_table", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/seed/seed.sql", + "original_file_path": "macros/materializations/seed/seed.sql", + "name": "reset_csv_table", + "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__reset_csv_table"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.load_csv_rows": { + "unique_id": "macro.dbt.load_csv_rows", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/seed/seed.sql", + "original_file_path": "macros/materializations/seed/seed.sql", + "name": "load_csv_rows", + "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows')(model, agate_table) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__load_csv_rows"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__create_csv_table": { + "unique_id": "macro.dbt.default__create_csv_table", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/seed/seed.sql", + "original_file_path": "macros/materializations/seed/seed.sql", + "name": "default__create_csv_table", + "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__reset_csv_table": { + "unique_id": "macro.dbt.default__reset_csv_table", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/seed/seed.sql", + "original_file_path": "macros/materializations/seed/seed.sql", + "name": "default__reset_csv_table", + "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, 
agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.create_csv_table"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.get_seed_column_quoted_csv": { + "unique_id": "macro.dbt.get_seed_column_quoted_csv", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/seed/seed.sql", + "original_file_path": "macros/materializations/seed/seed.sql", + "name": "get_seed_column_quoted_csv", + "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.basic_load_csv_rows": { + "unique_id": "macro.dbt.basic_load_csv_rows", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/seed/seed.sql", + "original_file_path": "macros/materializations/seed/seed.sql", + "name": "basic_load_csv_rows", + "macro_sql": "{% macro basic_load_csv_rows(model, batch_size, agate_table) %}\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n %s\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.get_seed_column_quoted_csv"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__load_csv_rows": { + "unique_id": "macro.dbt.default__load_csv_rows", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/seed/seed.sql", + "original_file_path": "macros/materializations/seed/seed.sql", + "name": "default__load_csv_rows", + "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n {{ return(basic_load_csv_rows(model, 10000, agate_table) )}}\n{% 
endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.basic_load_csv_rows"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.materialization_seed_default": { + "unique_id": "macro.dbt.materialization_seed_default", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/seed/seed.sql", + "original_file_path": "macros/materializations/seed/seed.sql", + "name": "materialization_seed_default", + "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set agate_table = load_agate_table() -%}\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ create_table_sql }};\n -- dbt seed --\n {{ sql }}\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.persist_docs", "macro.dbt.create_indexes"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.materialization_table_default": { + "unique_id": "macro.dbt.materialization_table_default", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/table/table.sql", + "original_file_path": "macros/materializations/table/table.sql", + "name": "materialization_table_default", + "macro_sql": "{% materialization table, default %}\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = 
model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier,\n schema=schema,\n database=database,\n type='table') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema,\n database=database,\n type='table') -%}\n\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema,\n database=database,\n type=backup_relation_type) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n\n -- drop the temp relations if they exists for some reason\n {{ adapter.drop_relation(intermediate_relation) }}\n {{ adapter.drop_relation(backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_table_as(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if old_relation is not none %}\n {{ adapter.rename_relation(target_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.create_indexes", "macro.dbt.persist_docs", "macro.dbt.drop_relation_if_exists"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.get_merge_sql": { + "unique_id": "macro.dbt.get_merge_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/common/merge.sql", + "original_file_path": "macros/materializations/common/merge.sql", + "name": "get_merge_sql", + "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, predicates=none) -%}\n {{ adapter.dispatch('get_merge_sql')(target, source, unique_key, dest_columns, predicates) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__get_merge_sql"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.get_delete_insert_merge_sql": { + "unique_id": "macro.dbt.get_delete_insert_merge_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": 
"macros/materializations/common/merge.sql", + "original_file_path": "macros/materializations/common/merge.sql", + "name": "get_delete_insert_merge_sql", + "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql')(target, source, unique_key, dest_columns) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__get_delete_insert_merge_sql"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.get_insert_overwrite_merge_sql": { + "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/common/merge.sql", + "original_file_path": "macros/materializations/common/merge.sql", + "name": "get_insert_overwrite_merge_sql", + "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__get_merge_sql": { + "unique_id": "macro.dbt.default__get_merge_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/common/merge.sql", + "original_file_path": "macros/materializations/common/merge.sql", + "name": "default__get_merge_sql", + "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, predicates) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set update_columns = config.get('merge_update_columns', default = dest_columns | map(attribute=\"quoted\") | list) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{ predicates | join(' and ') }}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.get_quoted_csv"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.get_quoted_csv": { + "unique_id": "macro.dbt.get_quoted_csv", + "package_name": "dbt", + 
"root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/common/merge.sql", + "original_file_path": "macros/materializations/common/merge.sql", + "name": "get_quoted_csv", + "macro_sql": "{% macro get_quoted_csv(column_names) %}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.common_get_delete_insert_merge_sql": { + "unique_id": "macro.dbt.common_get_delete_insert_merge_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/common/merge.sql", + "original_file_path": "macros/materializations/common/merge.sql", + "name": "common_get_delete_insert_merge_sql", + "macro_sql": "{% macro common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key is not none %}\n delete from {{ target }}\n where ({{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n );\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n );\n\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.get_quoted_csv"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__get_delete_insert_merge_sql": { + "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/common/merge.sql", + "original_file_path": "macros/materializations/common/merge.sql", + "name": "default__get_delete_insert_merge_sql", + "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns) -%}\n {{ common_get_delete_insert_merge_sql(target, source, unique_key, dest_columns) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.common_get_delete_insert_merge_sql"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__get_insert_overwrite_merge_sql": { + "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/common/merge.sql", + "original_file_path": "macros/materializations/common/merge.sql", + "name": "default__get_insert_overwrite_merge_sql", + "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- 
set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.get_quoted_csv"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.materialization_view_default": { + "unique_id": "macro.dbt.materialization_view_default", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/view/view.sql", + "original_file_path": "macros/materializations/view/view.sql", + "name": "materialization_view_default", + "macro_sql": "{%- materialization view, default -%}\n\n {%- set identifier = model['alias'] -%}\n {%- set tmp_identifier = model['name'] + '__dbt_tmp' -%}\n {%- set backup_identifier = model['name'] + '__dbt_backup' -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set target_relation = api.Relation.create(identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {%- set intermediate_relation = api.Relation.create(identifier=tmp_identifier,\n schema=schema, database=database, type='view') -%}\n\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"old_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the old_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the old_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if old_relation is none else old_relation.type -%}\n {%- set backup_relation = api.Relation.create(identifier=backup_identifier,\n schema=schema, database=database,\n type=backup_relation_type) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exists for some reason\n {{ adapter.drop_relation(intermediate_relation) }}\n {{ adapter.drop_relation(backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if old_relation is not none %}\n {{ adapter.rename_relation(target_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_view_as", "macro.dbt.persist_docs", "macro.dbt.drop_relation_if_exists"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.handle_existing_table": { + "unique_id": "macro.dbt.handle_existing_table", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/view/create_or_replace_view.sql", + "original_file_path": "macros/materializations/view/create_or_replace_view.sql", + "name": "handle_existing_table", + "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', macro_namespace = 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__handle_existing_table"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__handle_existing_table": { + "unique_id": "macro.dbt.default__handle_existing_table", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/view/create_or_replace_view.sql", + "original_file_path": "macros/materializations/view/create_or_replace_view.sql", + "name": "default__handle_existing_table", + "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.create_or_replace_view": { + "unique_id": "macro.dbt.create_or_replace_view", + "package_name": "dbt", + "root_path": 
"/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/materializations/view/create_or_replace_view.sql", + "original_file_path": "macros/materializations/view/create_or_replace_view.sql", + "name": "create_or_replace_view", + "macro_sql": "{% macro create_or_replace_view(run_outside_transaction_hooks=True) %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n\n {% if run_outside_transaction_hooks %}\n -- no transactions on BigQuery\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n {% endif %}\n\n -- `BEGIN` happens here on Snowflake\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ create_view_as(target_relation, sql) }}\n {%- endcall %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if run_outside_transaction_hooks %}\n -- No transactions on BigQuery\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n {% endif %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.create_view_as"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.get_columns_in_query": { + "unique_id": "macro.dbt.get_columns_in_query", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "get_columns_in_query", + "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query')(select_sql)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__get_columns_in_query"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__get_columns_in_query": { + "unique_id": "macro.dbt.default__get_columns_in_query", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__get_columns_in_query", + "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endcall %}\n\n {{ 
return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.create_schema": { + "unique_id": "macro.dbt.create_schema", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "create_schema", + "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema')(relation) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__create_schema"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__create_schema": { + "unique_id": "macro.dbt.default__create_schema", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__create_schema", + "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.drop_schema": { + "unique_id": "macro.dbt.drop_schema", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "drop_schema", + "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema')(relation) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__drop_schema"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__drop_schema": { + "unique_id": "macro.dbt.default__drop_schema", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__drop_schema", + "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.create_table_as": { + "unique_id": "macro.dbt.create_table_as", + "package_name": "dbt", + "root_path": 
"/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "create_table_as", + "macro_sql": "{% macro create_table_as(temporary, relation, sql) -%}\n {{ adapter.dispatch('create_table_as')(temporary, relation, sql) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__create_table_as"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__create_table_as": { + "unique_id": "macro.dbt.default__create_table_as", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__create_table_as", + "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n as (\n {{ sql }}\n );\n\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.get_create_index_sql": { + "unique_id": "macro.dbt.get_create_index_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "get_create_index_sql", + "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql')(relation, index_dict)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__get_create_index_sql"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__get_create_index_sql": { + "unique_id": "macro.dbt.default__get_create_index_sql", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__get_create_index_sql", + "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.create_indexes": { + "unique_id": "macro.dbt.create_indexes", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "create_indexes", + "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes')(relation) }}\n{%- 
endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__create_indexes"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__create_indexes": { + "unique_id": "macro.dbt.default__create_indexes", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__create_indexes", + "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.create_view_as": { + "unique_id": "macro.dbt.create_view_as", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "create_view_as", + "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as')(relation, sql) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__create_view_as"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__create_view_as": { + "unique_id": "macro.dbt.default__create_view_as", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__create_view_as", + "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }} as (\n {{ sql }}\n );\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.get_catalog": { + "unique_id": "macro.dbt.get_catalog", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "get_catalog", + "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog')(information_schema, schemas)) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__get_catalog"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + 
"created_at": 1625485389 + }, + "macro.dbt.default__get_catalog": { + "unique_id": "macro.dbt.default__get_catalog", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__get_catalog", + "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.get_columns_in_relation": { + "unique_id": "macro.dbt.get_columns_in_relation", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "get_columns_in_relation", + "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation')(relation)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__get_columns_in_relation"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.sql_convert_columns_in_relation": { + "unique_id": "macro.dbt.sql_convert_columns_in_relation", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "sql_convert_columns_in_relation", + "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__get_columns_in_relation": { + "unique_id": "macro.dbt.default__get_columns_in_relation", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__get_columns_in_relation", + "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.alter_column_type": { + "unique_id": "macro.dbt.alter_column_type", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": 
"macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "alter_column_type", + "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type')(relation, column_name, new_column_type)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__alter_column_type"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.alter_column_comment": { + "unique_id": "macro.dbt.alter_column_comment", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "alter_column_comment", + "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment')(relation, column_dict)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__alter_column_comment"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__alter_column_comment": { + "unique_id": "macro.dbt.default__alter_column_comment", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__alter_column_comment", + "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.alter_relation_comment": { + "unique_id": "macro.dbt.alter_relation_comment", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "alter_relation_comment", + "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment')(relation, relation_comment)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__alter_relation_comment"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__alter_relation_comment": { + "unique_id": "macro.dbt.default__alter_relation_comment", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__alter_relation_comment", + "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 
'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.persist_docs": { + "unique_id": "macro.dbt.persist_docs", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "persist_docs", + "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__persist_docs"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__persist_docs": { + "unique_id": "macro.dbt.default__persist_docs", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__persist_docs", + "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__alter_column_type": { + "unique_id": "macro.dbt.default__alter_column_type", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__alter_column_type", + "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. 
Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.drop_relation": { + "unique_id": "macro.dbt.drop_relation", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "drop_relation", + "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation')(relation)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__drop_relation"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__drop_relation": { + "unique_id": "macro.dbt.default__drop_relation", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__drop_relation", + "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endcall %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.truncate_relation": { + "unique_id": "macro.dbt.truncate_relation", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "truncate_relation", + "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation')(relation)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__truncate_relation"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__truncate_relation": { + "unique_id": "macro.dbt.default__truncate_relation", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__truncate_relation", + "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call 
statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.rename_relation": { + "unique_id": "macro.dbt.rename_relation", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "rename_relation", + "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation')(from_relation, to_relation)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__rename_relation"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__rename_relation": { + "unique_id": "macro.dbt.default__rename_relation", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__rename_relation", + "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.information_schema_name": { + "unique_id": "macro.dbt.information_schema_name", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "information_schema_name", + "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name')(database)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__information_schema_name"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__information_schema_name": { + "unique_id": "macro.dbt.default__information_schema_name", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__information_schema_name", + "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + 
}, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.list_schemas": { + "unique_id": "macro.dbt.list_schemas", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "list_schemas", + "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas')(database)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__list_schemas"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__list_schemas": { + "unique_id": "macro.dbt.default__list_schemas", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__list_schemas", + "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.check_schema_exists": { + "unique_id": "macro.dbt.check_schema_exists", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "check_schema_exists", + "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists')(information_schema, schema)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__check_schema_exists"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__check_schema_exists": { + "unique_id": "macro.dbt.default__check_schema_exists", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__check_schema_exists", + "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.run_query"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.list_relations_without_caching": { + "unique_id": 
"macro.dbt.list_relations_without_caching", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "list_relations_without_caching", + "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching')(schema_relation)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__list_relations_without_caching"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__list_relations_without_caching": { + "unique_id": "macro.dbt.default__list_relations_without_caching", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__list_relations_without_caching", + "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.current_timestamp": { + "unique_id": "macro.dbt.current_timestamp", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "current_timestamp", + "macro_sql": "{% macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp')() }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt_snowflake.snowflake__current_timestamp"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__current_timestamp": { + "unique_id": "macro.dbt.default__current_timestamp", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__current_timestamp", + "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter '+adapter.type()) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.collect_freshness": { + "unique_id": "macro.dbt.collect_freshness", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "collect_freshness", + "macro_sql": "{% macro collect_freshness(source, 
loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter))}}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__collect_freshness"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__collect_freshness": { + "unique_id": "macro.dbt.default__collect_freshness", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__collect_freshness", + "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness').table) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.make_temp_relation": { + "unique_id": "macro.dbt.make_temp_relation", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "make_temp_relation", + "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation')(base_relation, suffix))}}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__make_temp_relation"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__make_temp_relation": { + "unique_id": "macro.dbt.default__make_temp_relation", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "default__make_temp_relation", + "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {% set tmp_identifier = base_relation.identifier ~ suffix %}\n {% set tmp_relation = base_relation.incorporate(\n path={\"identifier\": tmp_identifier}) -%}\n\n {% do return(tmp_relation) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.set_sql_header": { + "unique_id": "macro.dbt.set_sql_header", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/adapters/common.sql", + "original_file_path": "macros/adapters/common.sql", + "name": "set_sql_header", + "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', 
caller()) }}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.convert_datetime": { + "unique_id": "macro.dbt.convert_datetime", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/etc/datetime.sql", + "original_file_path": "macros/etc/datetime.sql", + "name": "convert_datetime", + "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.dates_in_range": { + "unique_id": "macro.dbt.dates_in_range", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/etc/datetime.sql", + "original_file_path": "macros/etc/datetime.sql", + "name": "dates_in_range", + "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.convert_datetime"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.partition_range": { + "unique_id": "macro.dbt.partition_range", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/etc/datetime.sql", + "original_file_path": "macros/etc/datetime.sql", + "name": "partition_range", + "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.dates_in_range"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.py_current_timestring": { + "unique_id": "macro.dbt.py_current_timestring", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/etc/datetime.sql", + "original_file_path": "macros/etc/datetime.sql", + "name": "py_current_timestring", + "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.generate_schema_name": { + "unique_id": "macro.dbt.generate_schema_name", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/etc/get_custom_schema.sql", + "original_file_path": "macros/etc/get_custom_schema.sql", + "name": "generate_schema_name", + "macro_sql": "{% macro generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.generate_schema_name_for_env": { + "unique_id": "macro.dbt.generate_schema_name_for_env", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/etc/get_custom_schema.sql", + "original_file_path": "macros/etc/get_custom_schema.sql", + "name": "generate_schema_name_for_env", + "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.run_query": { + "unique_id": "macro.dbt.run_query", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/etc/query.sql", + "original_file_path": "macros/etc/query.sql", + "name": "run_query", + "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + 
"macros": ["macro.dbt.statement"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.is_incremental": { + "unique_id": "macro.dbt.is_incremental", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/etc/is_incremental.sql", + "original_file_path": "macros/etc/is_incremental.sql", + "name": "is_incremental", + "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.should_full_refresh"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.generate_database_name": { + "unique_id": "macro.dbt.generate_database_name", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/etc/get_custom_database.sql", + "original_file_path": "macros/etc/get_custom_database.sql", + "name": "generate_database_name", + "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name')(custom_database_name, node)) %}\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__generate_database_name"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__generate_database_name": { + "unique_id": "macro.dbt.default__generate_database_name", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/etc/get_custom_database.sql", + "original_file_path": "macros/etc/get_custom_database.sql", + "name": "default__generate_database_name", + "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.generate_alias_name": { + "unique_id": "macro.dbt.generate_alias_name", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/etc/get_custom_alias.sql", + "original_file_path": "macros/etc/get_custom_alias.sql", + "name": "generate_alias_name", + "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name is none -%}\n\n {{ node.name }}\n\n {%- else -%}\n\n {{ custom_alias_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro 
%}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__test_accepted_values": { + "unique_id": "macro.dbt.default__test_accepted_values", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/schema_tests/accepted_values.sql", + "original_file_path": "macros/schema_tests/accepted_values.sql", + "name": "default__test_accepted_values", + "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by 1\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.test_accepted_values": { + "unique_id": "macro.dbt.test_accepted_values", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/schema_tests/accepted_values.sql", + "original_file_path": "macros/schema_tests/accepted_values.sql", + "name": "test_accepted_values", + "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__test_accepted_values"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__test_relationships": { + "unique_id": "macro.dbt.default__test_relationships", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/schema_tests/relationships.sql", + "original_file_path": "macros/schema_tests/relationships.sql", + "name": "default__test_relationships", + "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nselect\n child.{{ column_name }}\n\nfrom {{ model }} as child\n\nleft join {{ to }} as parent\n on child.{{ column_name }} = parent.{{ field }}\n\nwhere child.{{ column_name }} is not null\n and parent.{{ field }} is null\n\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.test_relationships": { + "unique_id": "macro.dbt.test_relationships", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/schema_tests/relationships.sql", + "original_file_path": "macros/schema_tests/relationships.sql", + "name": "test_relationships", + "macro_sql": "{% test relationships(model, 
column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__test_relationships"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__test_not_null": { + "unique_id": "macro.dbt.default__test_not_null", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/schema_tests/not_null.sql", + "original_file_path": "macros/schema_tests/not_null.sql", + "name": "default__test_not_null", + "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\nselect *\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.test_not_null": { + "unique_id": "macro.dbt.test_not_null", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/schema_tests/not_null.sql", + "original_file_path": "macros/schema_tests/not_null.sql", + "name": "test_not_null", + "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null') %}\n {{ macro(model, column_name) }}\n{% endtest %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__test_not_null"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.default__test_unique": { + "unique_id": "macro.dbt.default__test_unique", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/schema_tests/unique.sql", + "original_file_path": "macros/schema_tests/unique.sql", + "name": "default__test_unique", + "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }},\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": [] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + }, + "macro.dbt.test_unique": { + "unique_id": "macro.dbt.test_unique", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "macros/schema_tests/unique.sql", + "original_file_path": "macros/schema_tests/unique.sql", + "name": "test_unique", + "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique') %}\n {{ macro(model, column_name) }}\n{% endtest %}", + "resource_type": "macro", + "tags": [], + "depends_on": { + "macros": ["macro.dbt.default__test_unique"] + }, + "description": "", + "meta": {}, + "docs": { + "show": true + }, + "patch_path": null, + "arguments": [], + "created_at": 1625485389 + } + }, + "docs": { + "jaffle_shop.__overview__": { + "unique_id": 
"jaffle_shop.__overview__", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "overview.md", + "original_file_path": "models/overview.md", + "name": "__overview__", + "block_contents": "## Data Documentation for Jaffle Shop\n\n`jaffle_shop` is a fictional ecommerce store.\n\nThis [dbt](https://www.getdbt.com/) project is for testing out code.\n\nThe source code can be found [here](https://github.com/clrcrl/jaffle_shop)." + }, + "jaffle_shop.orders_status": { + "unique_id": "jaffle_shop.orders_status", + "package_name": "jaffle_shop", + "root_path": "/home/example/code/jaffle-shop", + "path": "docs.md", + "original_file_path": "models/docs.md", + "name": "orders_status", + "block_contents": "Orders can be one of the following statuses:\n\n| status | description |\n|----------------|------------------------------------------------------------------------------------------------------------------------|\n| placed | The order has been placed but has not yet left the warehouse |\n| shipped | The order has ben shipped to the customer and is currently in transit |\n| completed | The order has been received by the customer |\n| return_pending | The customer has indicated that they would like to return the order, but it has not yet been received at the warehouse |\n| returned | The order has been returned by the customer and received at the warehouse |" + }, + "dbt.__overview__": { + "unique_id": "dbt.__overview__", + "package_name": "dbt", + "root_path": "/home/example/tools/pyenv/versions/3.9.2/lib/python3.9/site-packages/dbt/include/global_project", + "path": "overview.md", + "original_file_path": "docs/overview.md", + "name": "__overview__", + "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--models` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/overview)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [chat](https://community.getdbt.com/) on Slack for live questions and support." 
+ } + }, + "exposures": {}, + "selectors": {}, + "disabled": [], + "parent_map": { + "model.jaffle_shop.customers": ["model.jaffle_shop.stg_customers", "model.jaffle_shop.stg_orders", "model.jaffle_shop.stg_payments"], + "model.jaffle_shop.orders": ["model.jaffle_shop.stg_orders", "model.jaffle_shop.stg_payments"], + "model.jaffle_shop.stg_customers": ["seed.jaffle_shop.raw_customers"], + "model.jaffle_shop.stg_payments": ["seed.jaffle_shop.raw_payments"], + "model.jaffle_shop.stg_orders": ["seed.jaffle_shop.raw_orders"], + "seed.jaffle_shop.raw_customers": [], + "seed.jaffle_shop.raw_orders": [], + "seed.jaffle_shop.raw_payments": [], + "test.jaffle_shop.unique_customers_customer_id.d48e126d80": ["model.jaffle_shop.customers"], + "test.jaffle_shop.not_null_customers_customer_id.923d2d910a": ["model.jaffle_shop.customers"], + "test.jaffle_shop.unique_orders_order_id.0d77ddcf59": ["model.jaffle_shop.orders"], + "test.jaffle_shop.not_null_orders_order_id.4daff5eed7": ["model.jaffle_shop.orders"], + "test.jaffle_shop.not_null_orders_customer_id.70722cc05f": ["model.jaffle_shop.orders"], + "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.e153c026e4": ["model.jaffle_shop.customers", "model.jaffle_shop.orders"], + "test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.2e6d271b93": ["model.jaffle_shop.orders"], + "test.jaffle_shop.not_null_orders_amount.f7bae8de1b": ["model.jaffle_shop.orders"], + "test.jaffle_shop.not_null_orders_credit_card_amount.f6f7978042": ["model.jaffle_shop.orders"], + "test.jaffle_shop.not_null_orders_coupon_amount.edd08a4b47": ["model.jaffle_shop.orders"], + "test.jaffle_shop.not_null_orders_bank_transfer_amount.402a8a1daa": ["model.jaffle_shop.orders"], + "test.jaffle_shop.not_null_orders_gift_card_amount.6205906a88": ["model.jaffle_shop.orders"], + "test.jaffle_shop.unique_stg_customers_customer_id.5530022331": ["model.jaffle_shop.stg_customers"], + "test.jaffle_shop.not_null_stg_customers_customer_id.4ab9034fe1": ["model.jaffle_shop.stg_customers"], + "test.jaffle_shop.unique_stg_orders_order_id.99e62d7d48": ["model.jaffle_shop.stg_orders"], + "test.jaffle_shop.not_null_stg_orders_order_id.052f14ae90": ["model.jaffle_shop.stg_orders"], + "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.1b7358ad3f": ["model.jaffle_shop.stg_orders"], + "test.jaffle_shop.unique_stg_payments_payment_id.5f5522e7d6": ["model.jaffle_shop.stg_payments"], + "test.jaffle_shop.not_null_stg_payments_payment_id.ece096e012": ["model.jaffle_shop.stg_payments"], + "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.59d3da1081": ["model.jaffle_shop.stg_payments"] + }, + "child_map": { + "model.jaffle_shop.customers": ["test.jaffle_shop.not_null_customers_customer_id.923d2d910a", "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.e153c026e4", "test.jaffle_shop.unique_customers_customer_id.d48e126d80"], + "model.jaffle_shop.orders": ["test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.2e6d271b93", "test.jaffle_shop.not_null_orders_amount.f7bae8de1b", "test.jaffle_shop.not_null_orders_bank_transfer_amount.402a8a1daa", "test.jaffle_shop.not_null_orders_coupon_amount.edd08a4b47", "test.jaffle_shop.not_null_orders_credit_card_amount.f6f7978042", "test.jaffle_shop.not_null_orders_customer_id.70722cc05f", 
"test.jaffle_shop.not_null_orders_gift_card_amount.6205906a88", "test.jaffle_shop.not_null_orders_order_id.4daff5eed7", "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.e153c026e4", "test.jaffle_shop.unique_orders_order_id.0d77ddcf59"], + "model.jaffle_shop.stg_customers": ["model.jaffle_shop.customers", "test.jaffle_shop.not_null_stg_customers_customer_id.4ab9034fe1", "test.jaffle_shop.unique_stg_customers_customer_id.5530022331"], + "model.jaffle_shop.stg_payments": ["model.jaffle_shop.customers", "model.jaffle_shop.orders", "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.59d3da1081", "test.jaffle_shop.not_null_stg_payments_payment_id.ece096e012", "test.jaffle_shop.unique_stg_payments_payment_id.5f5522e7d6"], + "model.jaffle_shop.stg_orders": ["model.jaffle_shop.customers", "model.jaffle_shop.orders", "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.1b7358ad3f", "test.jaffle_shop.not_null_stg_orders_order_id.052f14ae90", "test.jaffle_shop.unique_stg_orders_order_id.99e62d7d48"], + "seed.jaffle_shop.raw_customers": ["model.jaffle_shop.stg_customers"], + "seed.jaffle_shop.raw_orders": ["model.jaffle_shop.stg_orders"], + "seed.jaffle_shop.raw_payments": ["model.jaffle_shop.stg_payments"], + "test.jaffle_shop.unique_customers_customer_id.d48e126d80": [], + "test.jaffle_shop.not_null_customers_customer_id.923d2d910a": [], + "test.jaffle_shop.unique_orders_order_id.0d77ddcf59": [], + "test.jaffle_shop.not_null_orders_order_id.4daff5eed7": [], + "test.jaffle_shop.not_null_orders_customer_id.70722cc05f": [], + "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.e153c026e4": [], + "test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.2e6d271b93": [], + "test.jaffle_shop.not_null_orders_amount.f7bae8de1b": [], + "test.jaffle_shop.not_null_orders_credit_card_amount.f6f7978042": [], + "test.jaffle_shop.not_null_orders_coupon_amount.edd08a4b47": [], + "test.jaffle_shop.not_null_orders_bank_transfer_amount.402a8a1daa": [], + "test.jaffle_shop.not_null_orders_gift_card_amount.6205906a88": [], + "test.jaffle_shop.unique_stg_customers_customer_id.5530022331": [], + "test.jaffle_shop.not_null_stg_customers_customer_id.4ab9034fe1": [], + "test.jaffle_shop.unique_stg_orders_order_id.99e62d7d48": [], + "test.jaffle_shop.not_null_stg_orders_order_id.052f14ae90": [], + "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.1b7358ad3f": [], + "test.jaffle_shop.unique_stg_payments_payment_id.5f5522e7d6": [], + "test.jaffle_shop.not_null_stg_payments_payment_id.ece096e012": [], + "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.59d3da1081": [] + } +} diff --git a/integration/common/tests/dbt/no_test_metadata/target/run_results.json b/integration/common/tests/dbt/no_test_metadata/target/run_results.json new file mode 100644 index 0000000000..8b1f57d9c5 --- /dev/null +++ b/integration/common/tests/dbt/no_test_metadata/target/run_results.json @@ -0,0 +1,378 @@ +{ + "metadata": { + "dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v12.json", + "dbt_version": "1.8.5", + "generated_at": "2021-07-05T11:43:26.134009Z", + "invocation_id": "ebb384bf-f2f3-4303-bb1b-98b1daa6d2d4", + "env": {} + }, + "results": [{ + "status": "fail", + "timing": [{ + "name": "compile", + 
"started_at": "2021-07-05T11:43:14.752733Z", + "completed_at": "2021-07-05T11:43:14.756125Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:14.756283Z", + "completed_at": "2021-07-05T11:43:16.146647Z" + }], + "thread_id": "Thread-1", + "execution_time": 1.5134623050689697, + "adapter_response": {}, + "message": "Test failure", + "failures": 1, + "unique_id": "test.jaffle_shop.unique_customers_customer_id.d48e126d80" + }, { + "status": "fail", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:16.267161Z", + "completed_at": "2021-07-05T11:43:16.270995Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:16.271247Z", + "completed_at": "2021-07-05T11:43:17.218104Z" + }], + "thread_id": "Thread-1", + "execution_time": 1.0679519176483154, + "adapter_response": {}, + "message": null, + "failures": "Test failure", + "unique_id": "test.jaffle_shop.not_null_customers_customer_id.923d2d910a" + }, { + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:17.336143Z", + "completed_at": "2021-07-05T11:43:17.338653Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:17.338870Z", + "completed_at": "2021-07-05T11:43:18.375002Z" + }], + "thread_id": "Thread-1", + "execution_time": 1.151458501815796, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.unique_orders_order_id.0d77ddcf59" + }, { + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:18.489684Z", + "completed_at": "2021-07-05T11:43:18.498060Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:18.498280Z", + "completed_at": "2021-07-05T11:43:21.900891Z" + }], + "thread_id": "Thread-1", + "execution_time": 3.539695978164673, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.not_null_orders_order_id.4daff5eed7" + }, { + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:18.489684Z", + "completed_at": "2021-07-05T11:43:18.498060Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:18.498280Z", + "completed_at": "2021-07-05T11:43:21.900891Z" + }], + "thread_id": "Thread-1", + "execution_time": 3.539695978164673, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.not_null_orders_customer_id.70722cc05f" + }, { + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.relationships_orders_customer_id__customer_id__ref_customers_.e153c026e4" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.accepted_values_orders_status__placed__shipped__completed__return_pending__returned.2e6d271b93" + },{ + "status": "pass", + "timing": [{ 
+ "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.not_null_orders_amount.f7bae8de1b" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.not_null_orders_credit_card_amount.f6f7978042" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.not_null_orders_coupon_amount.edd08a4b47" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.not_null_orders_bank_transfer_amount.402a8a1daa" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.not_null_orders_gift_card_amount.6205906a88" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.unique_stg_customers_customer_id.5530022331" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.not_null_stg_customers_customer_id.4ab9034fe1" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": 
"2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.unique_stg_orders_order_id.99e62d7d48" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.not_null_stg_orders_order_id.052f14ae90" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.accepted_values_stg_orders_status__placed__shipped__completed__return_pending__returned.1b7358ad3f" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.unique_stg_payments_payment_id.5f5522e7d6" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.not_null_stg_payments_payment_id.ece096e012" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "test.jaffle_shop.accepted_values_stg_payments_payment_method__credit_card__coupon__bank_transfer__gift_card.59d3da1081" + },{ + "status": "pass", + "timing": [{ + "name": "compile", + "started_at": "2021-07-05T11:43:22.030881Z", + "completed_at": "2021-07-05T11:43:22.037396Z" + }, { + "name": "execute", + "started_at": "2021-07-05T11:43:22.037678Z", + "completed_at": "2021-07-05T11:43:24.406553Z" + }], + "thread_id": "Thread-1", + "execution_time": 2.5174121856689453, + "adapter_response": {}, + "message": "SUCCESS 1", + "failures": null, + "unique_id": "" + }], + "elapsed_time": 16.22612500190735, + "args": { + "log_format": 
"default", + "write_json": true, + "use_experimental_parser": false, + "profiles_dir": "./tests/dbt/no_test_metadata", + "use_cache": true, + "version_check": true, + "which": "test", + "rpc_method": "test" + } +} diff --git a/integration/common/tests/dbt/test_dbt_local.py b/integration/common/tests/dbt/test_dbt_local.py index 53b9be9011..57af7f16a5 100644 --- a/integration/common/tests/dbt/test_dbt_local.py +++ b/integration/common/tests/dbt/test_dbt_local.py @@ -70,9 +70,16 @@ def test_dbt_parse_and_compare_event(path, parent_run_metadata): assert match(json.load(f), events) +@pytest.mark.parametrize( + "path", + [ + "tests/dbt/test", + "tests/dbt/no_test_metadata", + ], +) @mock.patch("openlineage.common.provider.dbt.processor.generate_new_uuid") @mock.patch("datetime.datetime") -def test_dbt_parse_dbt_test_event(mock_datetime, mock_uuid, parent_run_metadata): +def test_dbt_parse_dbt_test_event(mock_datetime, mock_uuid, parent_run_metadata, path): mock_datetime.now.return_value.isoformat.return_value = "2021-08-25T11:00:25.277467+00:00" mock_uuid.side_effect = [ "6edf42ed-d8d0-454a-b819-d09b9067ff99", @@ -85,7 +92,7 @@ def test_dbt_parse_dbt_test_event(mock_datetime, mock_uuid, parent_run_metadata) processor = DbtLocalArtifactProcessor( producer="https://github.com/OpenLineage/OpenLineage/tree/0.0.1/integration/dbt", job_namespace="dbt-test-namespace", - project_dir="tests/dbt/test", + project_dir=path, ) processor.dbt_run_metadata = parent_run_metadata @@ -94,7 +101,7 @@ def test_dbt_parse_dbt_test_event(mock_datetime, mock_uuid, parent_run_metadata) attr.asdict(event, value_serializer=serialize) for event in dbt_events.starts + dbt_events.completes + dbt_events.fails ] - with open("tests/dbt/test/result.json") as f: + with open(f"{path}/result.json") as f: assert match(json.load(f), events) From 41d155f24b97b4d458cef2f7b203964bef7294df Mon Sep 17 00:00:00 2001 From: Artur Owczarek Date: Tue, 12 Nov 2024 01:18:04 +0100 Subject: [PATCH 63/89] fix: Wrong Scala version in Databricks integration tests configuration. 
(#3236) Signed-off-by: Artur Owczarek --- .circleci/workflows/openlineage-spark.yml | 4 ++-- .../io/openlineage/spark/agent/DatabricksEnvironment.java | 4 ++-- integration/sql/iface-py/script/setup-macos.sh | 1 + 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.circleci/workflows/openlineage-spark.yml b/.circleci/workflows/openlineage-spark.yml index 2745333b50..84ddd7f885 100644 --- a/.circleci/workflows/openlineage-spark.yml +++ b/.circleci/workflows/openlineage-spark.yml @@ -93,8 +93,8 @@ workflows: matrix: parameters: env-variant: [ - 'java:8-spark:3.4.2-scala:2.12-full-tests', - 'java:17-spark:3.5.2-scala:2.13-full-tests' + 'java:8-spark:3.4.1-scala:2.12-full-tests', + 'java:17-spark:3.5.0-scala:2.12-full-tests' ] requires: - approval-integration-spark diff --git a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksEnvironment.java b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksEnvironment.java index 5552a1b70d..1f99ef9707 100644 --- a/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksEnvironment.java +++ b/integration/spark/app/src/test/java/io/openlineage/spark/agent/DatabricksEnvironment.java @@ -64,9 +64,9 @@ public class DatabricksEnvironment implements AutoCloseable { public static final String CLUSTER_NAME = "openlineage-test-cluster"; public static final Map PLATFORM_VERSIONS_NAMES = - ImmutableMap.of("3.4.2", "13.3.x-scala2.12", "3.5.2", "14.2.x-scala2.12"); + ImmutableMap.of("3.4.1", "13.3.x-scala2.12", "3.5.0", "14.3.x-scala2.12"); public static final Map PLATFORM_VERSIONS = - ImmutableMap.of("3.4.2", "13.3", "3.5.2", "14.2"); + ImmutableMap.of("3.4.1", "13.3", "3.5.0", "14.3"); public static final String NODE_TYPE = "Standard_DS3_v2"; public static final String INIT_SCRIPT_FILE = "/Shared/open-lineage-init-script.sh"; public static final String DBFS_CLUSTER_LOGS = "dbfs:/databricks/openlineage/cluster-logs"; diff --git a/integration/sql/iface-py/script/setup-macos.sh b/integration/sql/iface-py/script/setup-macos.sh index 9b9e32b79e..123767ddc5 100755 --- a/integration/sql/iface-py/script/setup-macos.sh +++ b/integration/sql/iface-py/script/setup-macos.sh @@ -18,6 +18,7 @@ curl https://sh.rustup.rs -sSf | sh -s -- -y echo "Installing uv" curl -LsSf https://astral.sh/uv/install.sh | sh +source $HOME/.local/bin/env source $HOME/.cargo/env rustup target add aarch64-apple-darwin From de844ef210df596371aa83dbf0380d25c3b32647 Mon Sep 17 00:00:00 2001 From: Michael Robinson <68482867+merobi-hub@users.noreply.github.com> Date: Tue, 12 Nov 2024 05:16:20 -0500 Subject: [PATCH 64/89] Replace bitly links with permanent slack invite. (#3230) Signed-off-by: merobi-hub --- website/blog/0.1-release/index.mdx | 2 +- website/blog/nyc-meetup/index.mdx | 2 +- website/blog/python-client/index.mdx | 2 +- website/docs/integrations/airflow/airflow.md | 2 +- website/docs/integrations/dbt.md | 2 +- website/docs/integrations/great-expectations.md | 2 +- website/docs/integrations/trino.md | 2 +- website/docusaurus.config.js | 2 +- website/src/pages/home.tsx | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/website/blog/0.1-release/index.mdx b/website/blog/0.1-release/index.mdx index ed690a5147..b1ee60d5bc 100644 --- a/website/blog/0.1-release/index.mdx +++ b/website/blog/0.1-release/index.mdx @@ -30,4 +30,4 @@ This release includes: This is only the beginning. We invite everyone interested to [consult and contribute to the roadmap](https://github.com/OpenLineage/OpenLineage/projects). 
The roadmap currently contains, among other things: adding support for [Kafka](https://github.com/OpenLineage/OpenLineage/issues/152), [BI dashboards](https://github.com/OpenLineage/OpenLineage/issues/207), and [column level lineage](https://github.com/OpenLineage/OpenLineage/issues/148)...but you can influence it by participating! -Follow the [repo](https://github.com/OpenLineage/OpenLineage) to stay updated. And, as always, you can [join the conversation](http://bit.ly/OpenLineageSlack) on Slack. +Follow the [repo](https://github.com/OpenLineage/OpenLineage) to stay updated. And, as always, you can [join the conversation](https://join.slack.com/t/openlineage/shared_invite/zt-2u4oiyz5h-TEmqpP4fVM5eCdOGeIbZvA) on Slack. diff --git a/website/blog/nyc-meetup/index.mdx b/website/blog/nyc-meetup/index.mdx index 0c56bc7823..c2b8396389 100644 --- a/website/blog/nyc-meetup/index.mdx +++ b/website/blog/nyc-meetup/index.mdx @@ -34,7 +34,7 @@ to the building is on 19th Street. #### Getting In - If you arrive before 6 pm, simply come on up to the third floor! Otherwise, post -a message in [Slack](http://bit.ly/OpenLineageSlack) to let us know you're here, +a message in [Slack](https://join.slack.com/t/openlineage/shared_invite/zt-2u4oiyz5h-TEmqpP4fVM5eCdOGeIbZvA) to let us know you're here, and someone will let you in. ### Hope to see you there! diff --git a/website/blog/python-client/index.mdx b/website/blog/python-client/index.mdx index 9e87f2916d..3eee5a69ee 100644 --- a/website/blog/python-client/index.mdx +++ b/website/blog/python-client/index.mdx @@ -124,6 +124,6 @@ Check out the source code here: https://github.com/OpenLineage/OpenLineage/tree/ Interested in contributing to the project? Read our guide for new contributors: https://github.com/OpenLineage/OpenLineage/blob/main/CONTRIBUTING.md. -Join us on Slack: http://bit.ly/OpenLineageSlack. +Join us on Slack: https://join.slack.com/t/openlineage/shared_invite/zt-2u4oiyz5h-TEmqpP4fVM5eCdOGeIbZvA. Attend a community meeting: https://bit.ly/OLwikitsc. diff --git a/website/docs/integrations/airflow/airflow.md b/website/docs/integrations/airflow/airflow.md index a7bb2c4d55..9c0cdbe510 100644 --- a/website/docs/integrations/airflow/airflow.md +++ b/website/docs/integrations/airflow/airflow.md @@ -49,4 +49,4 @@ If you want to expose lineage as a one off in your workflow, [you can also manua ## Feedback -You can reach out to us on [slack](http://bit.ly/OpenLineageSlack) and leave us feedback! +You can reach out to us on [slack](https://join.slack.com/t/openlineage/shared_invite/zt-2u4oiyz5h-TEmqpP4fVM5eCdOGeIbZvA) and leave us feedback! diff --git a/website/docs/integrations/dbt.md b/website/docs/integrations/dbt.md index 837a38dd58..beca93c809 100644 --- a/website/docs/integrations/dbt.md +++ b/website/docs/integrations/dbt.md @@ -58,4 +58,4 @@ Emitted 4 openlineage events ## Feedback -What did you think of this guide? You can reach out to us on [slack](http://bit.ly/OpenLineageSlack) and leave us feedback! \ No newline at end of file +What did you think of this guide? You can reach out to us on [slack](https://join.slack.com/t/openlineage/shared_invite/zt-2u4oiyz5h-TEmqpP4fVM5eCdOGeIbZvA) and leave us feedback! 
\ No newline at end of file diff --git a/website/docs/integrations/great-expectations.md b/website/docs/integrations/great-expectations.md index 422fc7921e..e3715561b4 100644 --- a/website/docs/integrations/great-expectations.md +++ b/website/docs/integrations/great-expectations.md @@ -83,4 +83,4 @@ Then run your Great Expecations checkpoint with the CLI or your integration of c ## Feedback -What did you think of this guide? You can reach out to us on [slack](http://bit.ly/OpenLineageSlack) and leave us feedback! +What did you think of this guide? You can reach out to us on [slack](https://join.slack.com/t/openlineage/shared_invite/zt-2u4oiyz5h-TEmqpP4fVM5eCdOGeIbZvA) and leave us feedback! diff --git a/website/docs/integrations/trino.md b/website/docs/integrations/trino.md index c45d6bf4c2..b222c2f746 100644 --- a/website/docs/integrations/trino.md +++ b/website/docs/integrations/trino.md @@ -57,4 +57,4 @@ Current documentation on Trino OpenLineage Event Listener with full list of avai ## Feedback -What did you think of this guide? You can reach out to us on [slack](http://bit.ly/OpenLineageSlack) and leave us feedback! +What did you think of this guide? You can reach out to us on [slack](https://join.slack.com/t/openlineage/shared_invite/zt-2u4oiyz5h-TEmqpP4fVM5eCdOGeIbZvA) and leave us feedback! diff --git a/website/docusaurus.config.js b/website/docusaurus.config.js index a0990a51cd..699d66a3b2 100644 --- a/website/docusaurus.config.js +++ b/website/docusaurus.config.js @@ -35,7 +35,7 @@ const linksSocial = [ {href: 'https://fosstodon.org/@openlineage', label: 'Mastodon', rel: 'me'}, {href: 'https://twitter.com/OpenLineage', label: 'Twitter'}, {href: 'https://www.linkedin.com/groups/13927795/', label: 'LinkedIn'}, - {href: 'http://bit.ly/OpenLineageSlack', label: 'Slack'}, + {href: 'https://join.slack.com/t/openlineage/shared_invite/zt-2u4oiyz5h-TEmqpP4fVM5eCdOGeIbZvA', label: 'Slack'}, {href: 'https://github.com/OpenLineage/OpenLineage', label: 'GitHub'} ] diff --git a/website/src/pages/home.tsx b/website/src/pages/home.tsx index aa8a537fd2..81b9be0627 100644 --- a/website/src/pages/home.tsx +++ b/website/src/pages/home.tsx @@ -83,7 +83,7 @@ const Wall = ({ twoColumnWall = false, capitalizeTitleOnHome = false }) => { />