Skip to content

Commit

Permalink
testing new workflow
Browse files Browse the repository at this point in the history
  • Loading branch information
krrish-sehgal committed Nov 24, 2024
1 parent f5b97e8 commit cfc90bf
Show file tree
Hide file tree
Showing 11 changed files with 115 additions and 47 deletions.
Binary file modified .DS_Store
Binary file not shown.
81 changes: 58 additions & 23 deletions .github/workflows/encrypt-and-upload-model.yml
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
name: Model Upload with SSE-KMS
name: Model Upload with Versioning and Cleanup

on:
push:
paths:
- "models/**" # Trigger on any file changes in the 'models' directory
- "models/**" # Trigger on changes in the 'models' directory
- "model_versions.json" # Trigger on changes in the version file

jobs:
upload_model:
Expand All @@ -16,32 +17,66 @@ jobs:
- name: Set up AWS CLI
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} # Securely use the secret key
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} # Securely use the secret key
aws-region: ${{ secrets.AWS_REGION }} # Use the region stored in the secret
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ secrets.AWS_REGION }}

- name: Find model files
id: find_models
- name: Compare model versions
id: compare_versions
run: |
# Find all model files in the models directory
MODEL_FILES=$(find models -type f)
echo "Model files found: $MODEL_FILES"
echo "::set-output name=model_files::$MODEL_FILES"
# Read the current and previous model versions
CURRENT_VERSIONS=$(cat model_versions.json)
S3_BUCKET="${{ secrets.S3_BUCKET_NAME }}"
CHANGED_MODELS=""
for MODEL in $(jq -r 'keys[]' <<< "$CURRENT_VERSIONS"); do
CURRENT_VERSION=$(jq -r --arg model "$MODEL" '.[$model]' <<< "$CURRENT_VERSIONS")
S3_FILE="s3://$S3_BUCKET/${MODEL}-${CURRENT_VERSION}"
# Check if the model with the current version already exists in S3
if ! aws s3 ls "$S3_FILE" >/dev/null 2>&1; then
echo "New or updated model detected: $MODEL (version $CURRENT_VERSION)"
CHANGED_MODELS="$CHANGED_MODELS $MODEL"
fi
done
echo "::set-output name=changed_models::$CHANGED_MODELS"
- name: Upload Models to S3 with SSE-KMS
- name: Upload updated models
if: steps.compare_versions.outputs.changed_models != ''
run: |
# Variables
S3_BUCKET_NAME="${{ secrets.S3_BUCKET_NAME }}" # Use the secret for the bucket name
KMS_KEY_ID="${{ secrets.KMS_KEY_ID }}" # Use the secret for the KMS key ID
MODEL_FILES="${{ steps.find_models.outputs.model_files }}" # List of model files
CHANGED_MODELS="${{ steps.compare_versions.outputs.changed_models }}"
CURRENT_VERSIONS=$(cat model_versions.json)
S3_BUCKET="${{ secrets.S3_BUCKET_NAME }}"
KMS_KEY_ID="${{ secrets.KMS_KEY_ID }}"
# Loop over each model file
for MODEL_FILE in $MODEL_FILES
do
echo "Uploading model file to S3 with SSE-KMS: $MODEL_FILE"
for MODEL in $CHANGED_MODELS; do
CURRENT_VERSION=$(jq -r --arg model "$MODEL" '.[$model]' <<< "$CURRENT_VERSIONS")
MODEL_PATH="models/$MODEL"
S3_FILE="s3://$S3_BUCKET/${MODEL}-${CURRENT_VERSION}"
# Upload the model file directly to S3 with server-side encryption using KMS
aws s3 cp $MODEL_FILE s3://$S3_BUCKET_NAME/ --sse aws:kms --sse-kms-key-id $KMS_KEY_ID
echo "Uploading $MODEL (version $CURRENT_VERSION) to $S3_FILE"
echo "Model file uploaded successfully: $MODEL_FILE"
# Upload to S3 with SSE-KMS
aws s3 cp "$MODEL_PATH" "$S3_FILE" --sse aws:kms --sse-kms-key-id "$KMS_KEY_ID"
# Check if upload was successful, then delete the file from the repo
if [ $? -eq 0 ]; then
echo "Model uploaded successfully, deleting $MODEL from the repository."
rm "$MODEL_PATH"
else
echo "Model upload failed for $MODEL."
fi
done
- name: Commit model deletions
if: steps.compare_versions.outputs.changed_models != ''
run: |
git config --global user.name "github-actions"
git config --global user.email "[email protected]"
# Stage deleted files and commit the changes
git add -u
git commit -m "Delete updated models from repo after S3 upload"
git push
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
.DS_Store
node_modules
.env
.env
Model
File renamed without changes.
Binary file added frontend/models/convit_tiny_Opset17.onnx
Binary file not shown.
1 change: 0 additions & 1 deletion medium_posts
Submodule medium_posts deleted from ca1378
68 changes: 48 additions & 20 deletions mern-backend/controllers/modelController.js
Original file line number Diff line number Diff line change
@@ -1,42 +1,70 @@
const { fetchEncryptedFilesFromS3 } = require("../utils/s3utils");
const { fetchDecryptedModelsFromS3 } = require("../utils/s3utils");
const { encryptModel } = require("../utils/encryptionUtils.js");
const { generateModelHash, signModelHash } = require("../utils/hashUtils.js");
const fs = require("fs");
const path = require("path");
const { fetchDecryptedModelsFromS3 } = require("./fetchDecryptedModelsFromS3");
const fs = require("fs").promises;
const path = require("path");
const { encryptModel, generateModelHash, signModelHash } = require("./encryptionUtils"); // Assuming these utility functions are implemented elsewhere

exports.getAllEncryptedModels = async (req, res, next) => {
try {
const modelKey = "antispofing.onnx";

const { modelFile } = await fetchEncryptedFilesFromS3(
modelKey
);

console.log("Decrypted model file:", modelFile);
// Load the versioning file
const versionsFilePath = path.resolve(__dirname, "../model_versions.json");
const modelVersions = JSON.parse(await fs.readFile(versionsFilePath, "utf-8"));

// Extract the public key from the request body
const publicKeyBase64 = req.body.publicKey;
if (!publicKeyBase64) {
return res.status(400).json({ message: "Public key is required" });
}

const { encryptedModel, encryptedAesKey, iv } = encryptModel(modelFile, publicKeyBase64);
// Prepare response for all models
const encryptedModels = await Promise.all(
Object.keys(modelVersions).map(async (modelName) => {
try {
// Fetch the decrypted model from S3
const { modelFile } = await fetchDecryptedModelsFromS3(modelName);

console.log(`Decrypted model file for ${modelName}:`, modelFile);

// Encrypt the model
const { encryptedModel, encryptedAesKey, iv } = encryptModel(modelFile, publicKeyBase64);

// Generate and sign the model hash
const modelHash = await generateModelHash(modelFile);
console.log(`Model hash for ${modelName}:`, modelHash);
const signedHash = signModelHash(modelHash);
console.log(`Signed hash for ${modelName}:`, signedHash);

// Return encrypted model data
return {
modelName,
encryptedModel: encryptedModel.toString("base64"),
encryptedAesKey: encryptedAesKey.toString("base64"),
iv: iv.toString("base64"),
signedHash,
version: modelVersions[modelName],
};
} catch (error) {
console.error(`Error processing model ${modelName}:`, error);
throw error; // NOTE: rethrowing inside Promise.all rejects the whole batch — one failed model aborts all and the outer catch returns 500
}
})
);

const modelHash = await generateModelHash(modelFile); // Add await here
console.log("Backend Model hash:", modelHash);
const signedHash = signModelHash(modelHash);
console.log("Bakend Signed Model hash:", signedHash);
res.status(200).json({
message: "Model encrypted and signed successfully",
encryptedModel: encryptedModel.toString("base64"),
encryptedAesKey: encryptedAesKey.toString("base64"),
iv: iv.toString("base64"),
signedHash: signedHash,
message: "Models encrypted and signed successfully",
encryptedModels,
});
} catch (error) {
console.error("Error fetching and decrypting models:", error);
res.status(500).json({ error: "Failed to fetch and decrypt models." });
console.error("Error fetching and processing models:", error);
res.status(500).json({ error: "Failed to fetch and process models." });
}
};


exports.getPublicVerificationKey = async (req, res, next) => {
try {
const publicKey = fs.readFileSync(path.join(__dirname, "../digital_signature_keys/public_key.pem"), "utf8");
Expand Down
4 changes: 2 additions & 2 deletions mern-backend/utils/s3utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ const AWS = require("aws-sdk");
const s3 = new AWS.S3();


async function fetchEncryptedFilesFromS3(modelKey) {
async function fetchDecryptedModelsFromS3(modelKey) {
const bucketName = process.env.S3_BUCKET_NAME; // Ensure this is set in your environment

try {
Expand All @@ -22,4 +22,4 @@ async function fetchEncryptedFilesFromS3(modelKey) {
}
}

module.exports = { fetchEncryptedFilesFromS3};
module.exports = { fetchDecryptedModelsFromS3};
5 changes: 5 additions & 0 deletions model_version.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
{
"antispofing.onnx": "1.0.0",
"face_recognition.onnx": "1.0.0"
}

Binary file added models/antispoofing.onnx
Binary file not shown.
Binary file added models/yolo-face-detection.onnx
Binary file not shown.

0 comments on commit cfc90bf

Please sign in to comment.