Skip to content

Commit

Permalink
restoring original state of gradle properties
Browse files Browse the repository at this point in the history
  • Loading branch information
lmassaoy committed Feb 3, 2025
1 parent e93e585 commit 2dbeda7
Show file tree
Hide file tree
Showing 6 changed files with 92 additions and 91 deletions.
2 changes: 1 addition & 1 deletion client/java/gradle/wrapper/gradle-wrapper.properties
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
Expand Down
8 changes: 4 additions & 4 deletions integration/spark-extension-interfaces/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -170,10 +170,10 @@ publishing {
}

// -------- removing the following section to enable the dependencies within the pom file --------
// pom.withXml {
//   Node pomNode = asNode()
//   pomNode.remove(pomNode.get("dependencies"))
// }
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
Original file line number Diff line number Diff line change
Expand Up @@ -31,97 +31,97 @@ class SparkOpenLineageExtensionVisitorTest {
SparkOpenLineageExtensionVisitor visitor = new SparkOpenLineageExtensionVisitor();
Map<String, String> options = Collections.singletonMap("path", "some-path");

// @Test
// void testDatasetIdentifierReturnedByLineageRelationProvider() {
// // given
// LineageRelationProvider provider =
// (sparkListenerEventName, openLineage, sqlContext, parameters) ->
// new DatasetIdentifier("name", "namespace")
// .withSymlink(
// new DatasetIdentifier.Symlink(
// "name1", "namespace1", DatasetIdentifier.SymlinkType.TABLE));
// assertThat(visitor.isDefinedAt(provider)).isTrue();
// Map<String, Object> result =
// visitor.apply(provider, event.getClass().getName(), sqlContext, options);
@Test
void testDatasetIdentifierReturnedByLineageRelationProvider() {
// given
LineageRelationProvider provider =
(sparkListenerEventName, openLineage, sqlContext, parameters) ->
new DatasetIdentifier("name", "namespace")
.withSymlink(
new DatasetIdentifier.Symlink(
"name1", "namespace1", DatasetIdentifier.SymlinkType.TABLE));
assertThat(visitor.isDefinedAt(provider)).isTrue();
Map<String, Object> result =
visitor.apply(provider, event.getClass().getName(), sqlContext, options);

// // then
// assertThat(result).extracting("name").isEqualTo("name");
// assertThat(result).extracting("namespace").isEqualTo("namespace");
// assertThat(result)
// .extracting("symlinks")
// .isEqualTo(
// list(
// ImmutableMap.builder()
// .put("name", "name1")
// .put("namespace", "namespace1")
// .put("type", "TABLE")
// .build()));
// }
// then
assertThat(result).extracting("name").isEqualTo("name");
assertThat(result).extracting("namespace").isEqualTo("namespace");
assertThat(result)
.extracting("symlinks")
.isEqualTo(
list(
ImmutableMap.builder()
.put("name", "name1")
.put("namespace", "namespace1")
.put("type", "TABLE")
.build()));
}

// @Test
// void testDatasetIdentifierReturnedByLineageRelation() {
// // given
// LineageRelation lineageRelation =
// (sparkListenerEventName, openLineage) ->
// new DatasetIdentifier("name", "namespace")
// .withSymlink(
// new DatasetIdentifier.Symlink(
// "name1", "namespace1", DatasetIdentifier.SymlinkType.TABLE));
@Test
void testDatasetIdentifierReturnedByLineageRelation() {
// given
LineageRelation lineageRelation =
(sparkListenerEventName, openLineage) ->
new DatasetIdentifier("name", "namespace")
.withSymlink(
new DatasetIdentifier.Symlink(
"name1", "namespace1", DatasetIdentifier.SymlinkType.TABLE));

// // when
// assertThat(visitor.isDefinedAt(lineageRelation)).isTrue();
// Map<String, Object> result = visitor.apply(lineageRelation, event.getClass().getName());
// when
assertThat(visitor.isDefinedAt(lineageRelation)).isTrue();
Map<String, Object> result = visitor.apply(lineageRelation, event.getClass().getName());

// // then
// assertThat(result).extracting("name").isEqualTo("name");
// assertThat(result).extracting("namespace").isEqualTo("namespace");
// assertThat(result)
// .extracting("symlinks")
// .isEqualTo(
// list(
// ImmutableMap.builder()
// .put("name", "name1")
// .put("namespace", "namespace1")
// .put("type", "TABLE")
// .build()));
// }
// then
assertThat(result).extracting("name").isEqualTo("name");
assertThat(result).extracting("namespace").isEqualTo("namespace");
assertThat(result)
.extracting("symlinks")
.isEqualTo(
list(
ImmutableMap.builder()
.put("name", "name1")
.put("namespace", "namespace1")
.put("type", "TABLE")
.build()));
}

// @Test
// void testFacetsReturnedByInputLineageNodeWithIdentifier() {
// // given
// InputLineageNode inputLineageNode =
// (sparkListenerEventName, openLineage) ->
// Collections.singletonList(
// new InputDatasetWithIdentifier(
// new DatasetIdentifier("a", "b"),
// openLineage.newDatasetFacetsBuilder(),
// openLineage.newInputDatasetInputFacetsBuilder()));
@Test
void testFacetsReturnedByInputLineageNodeWithIdentifier() {
// given
InputLineageNode inputLineageNode =
(sparkListenerEventName, openLineage) ->
Collections.singletonList(
new InputDatasetWithIdentifier(
new DatasetIdentifier("a", "b"),
openLineage.newDatasetFacetsBuilder(),
openLineage.newInputDatasetInputFacetsBuilder()));

// // when
// assertThat(visitor.isDefinedAt(inputLineageNode)).isTrue();
// Map<String, Object> result = visitor.apply(inputLineageNode, event.getClass().getName());
// when
assertThat(visitor.isDefinedAt(inputLineageNode)).isTrue();
Map<String, Object> result = visitor.apply(inputLineageNode, event.getClass().getName());

// // then
// assertThat(result).extracting("delegateNodes").isEqualTo(list());
// }
// then
assertThat(result).extracting("delegateNodes").isEqualTo(list());
}

// @Test
// void testFacetsReturnedByInputLineageNodeWithDelegates() {
// // given
// LogicalPlan delegate = mock(LogicalPlan.class);
// InputLineageNode inputLineageNode =
// (sparkListenerEventName, openLineage) ->
// Collections.singletonList(
// new InputDatasetWithDelegate(
// delegate,
// openLineage.newDatasetFacetsBuilder(),
// openLineage.newInputDatasetInputFacetsBuilder()));
@Test
void testFacetsReturnedByInputLineageNodeWithDelegates() {
// given
LogicalPlan delegate = mock(LogicalPlan.class);
InputLineageNode inputLineageNode =
(sparkListenerEventName, openLineage) ->
Collections.singletonList(
new InputDatasetWithDelegate(
delegate,
openLineage.newDatasetFacetsBuilder(),
openLineage.newInputDatasetInputFacetsBuilder()));

// // when
// assertThat(visitor.isDefinedAt(inputLineageNode)).isTrue();
// Map<String, Object> result = visitor.apply(inputLineageNode, event.getClass().getName());
// when
assertThat(visitor.isDefinedAt(inputLineageNode)).isTrue();
Map<String, Object> result = visitor.apply(inputLineageNode, event.getClass().getName());

// // then
// assertThat(result).extracting("delegateNodes").isEqualTo(list(delegate));
// }
// then
assertThat(result).extracting("delegateNodes").isEqualTo(list(delegate));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,8 @@ public class NuEventEmitter {
private static final Set<String> WANTED_EVENT_NAME_SUBSTRINGS = new HashSet<>(
Arrays.asList(
".execute_insert_into_hadoop_fs_relation_command.",
".adaptive_spark_plan."
".adaptive_spark_plan.",
"."
)
);

Expand Down

0 comments on commit 2dbeda7

Please sign in to comment.