-
Notifications
You must be signed in to change notification settings - Fork 36
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Docker integ test with async API (#1003)
Signed-off-by: Norman Jordan <[email protected]>
- Loading branch information
1 parent
be1df0f
commit 5884fea
Showing
24 changed files
with
1,077 additions
and
35 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
73 changes: 73 additions & 0 deletions
73
docker/integ-test/configuration-updater/apply-configuration.sh
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,73 @@ | ||
#!/bin/sh

# Copyright OpenSearch Contributors
# SPDX-License-Identifier: Apache-2.0

# Seed the integ-test environment: (re)create the Minio bucket, an S3 access
# key, the S3/Glue datasource in OpenSearch Dashboards, and the async-query
# execution-engine cluster setting in OpenSearch.
#
# NOTE: curl's -f flag is used on the calls whose exit status is checked
# below; without it curl exits 0 on HTTP 4xx/5xx and the failure branches
# could never fire.

# Login to Minio (session cookie stored for the follow-up calls)
curl -q \
  -c /tmp/minio-cookies.txt \
  -H 'Content-Type: application/json' \
  -d '{"accessKey": "minioadmin", "secretKey": "minioadmin"}' \
  http://minio-S3:9001/api/v1/login
# Delete the test bucket (best effort: it may not exist on a fresh run)
curl -b /tmp/minio-cookies.txt \
  -X DELETE \
  http://minio-S3:9001/api/v1/buckets/test
# Create the integ-test bucket
curl -q \
  -b /tmp/minio-cookies.txt \
  -X POST \
  -H 'Content-Type: application/json' \
  -d '{"name": "integ-test", "versioning": {"enabled": true, "excludePrefixes": [], "excludeFolders": false}, "locking": true}' \
  http://minio-S3:9001/api/v1/buckets
# Create the access key the Spark/EMR containers will use
curl -q \
  -b /tmp/minio-cookies.txt \
  -X POST \
  -H 'Content-Type: application/json' \
  -d "{\"policy\": \"\", \"accessKey\": \"${S3_ACCESS_KEY}\", \"secretKey\": \"${S3_SECRET_KEY}\", \"description\": \"\", \"comment\": \"\", \"name\": \"\", \"expiry\": null}" \
  http://minio-S3:9001/api/v1/service-account-credentials

# Login to OpenSearch Dashboards
echo ">>> Login to OpenSearch dashboards"
curl -q -f \
  -c /tmp/opensearch-cookies.txt \
  -X POST \
  -H 'Content-Type: application/json' \
  -H 'Osd-Version: 2.18.0' \
  -H 'Osd-Xsrf: fetch' \
  -d "{\"username\": \"admin\", \"password\": \"${OPENSEARCH_ADMIN_PASSWORD}\"}" \
  'http://opensearch-dashboards:5601/auth/login?dataSourceId='
if [ "$?" -eq "0" ]; then
  echo "  >>> Login successful"
else
  echo "  >>> Login failed"
fi
# Create the S3/Glue datasource
echo ">>> Creating datasource"
curl -q -f \
  -b /tmp/opensearch-cookies.txt \
  -X POST \
  -H 'Content-Type: application/json' \
  -H 'Osd-Version: 2.18.0' \
  -H 'Osd-Xsrf: fetch' \
  -d "{\"name\": \"mys3\", \"allowedRoles\": [], \"connector\": \"s3glue\", \"properties\": {\"glue.auth.type\": \"iam_role\", \"glue.auth.role_arn\": \"arn:aws:iam::123456789012:role/S3Access\", \"glue.indexstore.opensearch.uri\": \"http://opensearch:9200\", \"glue.indexstore.opensearch.auth\": \"basicauth\", \"glue.indexstore.opensearch.auth.username\": \"admin\", \"glue.indexstore.opensearch.auth.password\": \"${OPENSEARCH_ADMIN_PASSWORD}\"}}" \
  http://opensearch-dashboards:5601/api/directquery/dataconnections
if [ "$?" -eq "0" ]; then
  echo "  >>> S3 datasource created"
else
  echo "  >>> Failed to create S3 datasource"
fi

# Point the async-query execution engine at the (fake) EMR application
echo ">>> Setting cluster settings"
curl -v -f \
  -u "admin:${OPENSEARCH_ADMIN_PASSWORD}" \
  -X PUT \
  -H 'Content-Type: application/json' \
  -d '{"persistent": {"plugins.query.executionengine.spark.config": "{\"applicationId\":\"integ-test\",\"executionRoleARN\":\"arn:aws:iam::xxxxx:role/emr-job-execution-role\",\"region\":\"us-west-2\", \"sparkSubmitParameters\": \"--conf spark.dynamicAllocation.enabled=false\"}"}}' \
  http://opensearch:9200/_cluster/settings
if [ "$?" -eq "0" ]; then
  echo "  >>> Successfully set cluster settings"
else
  echo "  >>> Failed to set cluster settings"
fi
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,29 @@ | ||
# Copyright OpenSearch Contributors
# SPDX-License-Identifier: Apache-2.0

# Hive 2.3.9 standalone metastore on Hadoop 3.3.4, used by the docker
# integ-test environment. Listens on the standard metastore port 9083.

FROM openjdk:21-jdk-bookworm

WORKDIR /opt

ENV HADOOP_HOME=/opt/hadoop-3.3.4
ENV HIVE_HOME=/opt/apache-hive-2.3.9-bin

# Fail the build if a download in a pipe fails: the default /bin/sh -c
# returns tar's status only, masking curl errors (hadolint DL4006), and
# curl without -f exits 0 on HTTP errors.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

RUN curl -fL https://archive.apache.org/dist/hive/hive-2.3.9/apache-hive-2.3.9-bin.tar.gz | tar zxf -
RUN curl -fL https://archive.apache.org/dist/hadoop/common/hadoop-3.3.4/hadoop-3.3.4.tar.gz | tar zxf -

# Put the Hadoop client jars and the S3 connector (hadoop-aws + AWS SDK
# bundle) on Hive's classpath so the metastore can talk to Minio/S3.
RUN cp $HADOOP_HOME/share/hadoop/client/hadoop-client-api-3.3.4.jar $HIVE_HOME/lib/ \
 && cp $HADOOP_HOME/share/hadoop/client/hadoop-client-runtime-3.3.4.jar $HIVE_HOME/lib/ \
 && cp $HADOOP_HOME/share/hadoop/tools/lib/hadoop-aws-3.3.4.jar $HIVE_HOME/lib/ \
 && cp $HADOOP_HOME/share/hadoop/tools/lib/aws-java-sdk-bundle-1.12.262.jar $HIVE_HOME/lib/

# Run as a dedicated non-root user with a stable UID/GID (1000)
RUN groupadd -f -r hive --gid=1000 \
 && useradd -r -g hive --uid=1000 -d ${HIVE_HOME} hive \
 && chown hive:hive -R ${HIVE_HOME}

# Writable data directory for the metastore's local state
RUN mkdir /data \
 && chown hive:hive /data

WORKDIR $HIVE_HOME
EXPOSE 9083
USER hive
ENTRYPOINT ["/opt/apache-hive-2.3.9-bin/bin/hive", "--service", "metastore"]
Oops, something went wrong.