Change to boto3 and remove gcloud
bsmartradio committed Dec 13, 2024
1 parent 1e7bc60 commit 97528a1
Showing 5 changed files with 587 additions and 393 deletions.
12 changes: 6 additions & 6 deletions .pre-commit-config.yaml
@@ -2,38 +2,38 @@
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.0.1
+    rev: v5.0.0
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer
       - id: check-yaml

   - repo: https://github.com/PyCQA/flake8
-    rev: 3.9.2
+    rev: 7.1.1
     hooks:
       - id: flake8

   - repo: https://github.com/PyCQA/isort
-    rev: 5.8.0
+    rev: 5.13.2
     hooks:
       - id: isort
         additional_dependencies:
           - toml

   - repo: https://github.com/psf/black
-    rev: 21.7b0
+    rev: 24.10.0
     hooks:
       - id: black

   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v0.910
+    rev: v1.13.0
     hooks:
       - id: mypy
         additional_dependencies:
           - types-requests

   - repo: https://github.com/jazzband/pip-tools
-    rev: 6.2.0
+    rev: 7.4.1
     hooks:
       - id: pip-compile
         name: pip-compile dev-requirements.in
37 changes: 19 additions & 18 deletions alertdb/storage.py
@@ -6,6 +6,8 @@
 import logging
 import os.path

+import boto3
+import botocore
 import google.api_core.exceptions
 import google.cloud.storage as gcs

@@ -131,38 +133,37 @@ def get_schema(self, schema_id: str) -> bytes:
raise NotFoundError("schema not found") from file_not_found


class GoogleObjectStorageBackend(AlertDatabaseBackend):
"""
Retrieves alerts and schemas from a Google Cloud Storage bucket.
The path for alert and schema objects follows the scheme in DMTN-183.
"""
class USDFObjectStorageBackend(AlertDatabaseBackend):

def __init__(
self, gcp_project: str, packet_bucket_name: str, schema_bucket_name: str
self, endpoint_url: str, packet_bucket_name: str, schema_bucket_name: str
):
self.object_store_client = gcs.Client(project=gcp_project)
self.packet_bucket = self.object_store_client.bucket(packet_bucket_name)
self.schema_bucket = self.object_store_client.bucket(schema_bucket_name)
self.object_store_client = boto3.client(
"s3", endpoint_url=endpoint_url
) # Default way of getting a boto3 client that an talk to s3
self.packet_bucket = packet_bucket_name
self.schema_bucket = schema_bucket_name

def get_alert(self, alert_id: str) -> bytes:
logger.info("retrieving alert id=%s", alert_id)
try:
blob = self.packet_bucket.blob(
f"/alert_archive/v1/alerts/{alert_id}.avro.gz"
alert_key = f"/alert_archive/v1/alerts/{alert_id}.avro.gz" # boto3 terminology for objects, objects live in prefixes inside of buckets
blob = self.object_store_client.get_object(
Bucket=self.packet_bucket, Key=alert_key
)
return blob.download_as_bytes()
except google.api_core.exceptions.NotFound as not_found:
return blob["Body"].read()
except self.object_store_client.exceptions.NoSuchKey as not_found:
raise NotFoundError("alert not found") from not_found

def get_schema(self, schema_id: str) -> bytes:
logger.info("retrieving schema id=%s", schema_id)
try:
blob = self.schema_bucket.blob(
f"/alert_archive/v1/schemas/{schema_id}.json"
schema_key = f"/alert_archive/v1/schemas/{schema_id}.json"
blob = self.object_store_client.get_object(
Bucket=self.schema_bucket, Key=schema_key
)
return blob.download_as_bytes()
except google.api_core.exceptions.NotFound as not_found:
return blob["Body"].read()
except self.object_store_client.exceptions.NoSuchKey as not_found:
raise NotFoundError("alert not found") from not_found


(Diffs for the remaining three changed files are not shown here.)
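For orientation, here is a minimal usage sketch of the new USDFObjectStorageBackend shown above. The endpoint URL, bucket names, and IDs are placeholders rather than values taken from this commit; credentials are assumed to come from the usual boto3 environment (AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY or a shared credentials file), and NotFoundError is assumed to be defined in alertdb/storage.py, as the raise statements above suggest.

# Hypothetical usage sketch; values below are placeholders, not part of the commit.
from alertdb.storage import NotFoundError, USDFObjectStorageBackend

backend = USDFObjectStorageBackend(
    endpoint_url="https://s3.example.org",  # placeholder S3-compatible endpoint
    packet_bucket_name="alert-packets",     # placeholder bucket of gzipped Avro alert packets
    schema_bucket_name="alert-schemas",     # placeholder bucket of JSON schemas
)

try:
    alert_bytes = backend.get_alert("12345")  # raw .avro.gz payload
    schema_bytes = backend.get_schema("1")    # raw JSON schema bytes
except NotFoundError:
    # Raised when get_object fails with NoSuchKey, i.e. the object is absent.
    print("requested alert or schema is not in the archive")

Because boto3's get_object raises client.exceptions.NoSuchKey for a missing key, the backend can translate that into the service's own NotFoundError, mirroring what the GCS version did with google.api_core.exceptions.NotFound.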
