diff --git a/.DS_Store b/.DS_Store
index 59096b5..a5f0a37 100644
Binary files a/.DS_Store and b/.DS_Store differ
diff --git a/.github/workflows/encrypt-and-upload-model.yml b/.github/workflows/encrypt-and-upload-model.yml
index 14ce4d2..7dd93bb 100644
--- a/.github/workflows/encrypt-and-upload-model.yml
+++ b/.github/workflows/encrypt-and-upload-model.yml
@@ -1,9 +1,10 @@
-name: Model Upload with SSE-KMS
+name: Model Upload with Versioning and Cleanup
 
 on:
   push:
     paths:
-      - "models/**" # Trigger on any file changes in the 'models' directory
+      - "models/**" # Trigger on changes in the 'models' directory
+      - "model_versions.json" # Trigger on changes in the version file
 
 jobs:
   upload_model:
@@ -16,32 +17,66 @@ jobs:
       - name: Set up AWS CLI
        uses: aws-actions/configure-aws-credentials@v1
         with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} # Securely use the secret key
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} # Securely use the secret key
-          aws-region: ${{ secrets.AWS_REGION }} # Use the region stored in the secret
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: ${{ secrets.AWS_REGION }}
 
-      - name: Find model files
-        id: find_models
+      - name: Compare model versions
+        id: compare_versions
         run: |
-          # Find all model files in the models directory
-          MODEL_FILES=$(find models -type f)
-          echo "Model files found: $MODEL_FILES"
-          echo "::set-output name=model_files::$MODEL_FILES"
+          # Read the model versions currently tracked in the repository
+          CURRENT_VERSIONS=$(cat model_versions.json)
+          S3_BUCKET="${{ secrets.S3_BUCKET_NAME }}"
+
+          CHANGED_MODELS=""
+
+          for MODEL in $(jq -r 'keys[]' <<< "$CURRENT_VERSIONS"); do
+            CURRENT_VERSION=$(jq -r --arg model "$MODEL" '.[$model]' <<< "$CURRENT_VERSIONS")
+            S3_FILE="s3://$S3_BUCKET/${MODEL}-${CURRENT_VERSION}"
+
+            # Check if the model with the current version already exists in S3
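+            # `aws s3 ls` exits non-zero when no object matches the key, so a
+            # failed lookup means this version has not been uploaded yet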
+            if ! aws s3 ls "$S3_FILE" >/dev/null 2>&1; then
+              echo "New or updated model detected: $MODEL (version $CURRENT_VERSION)"
+              CHANGED_MODELS="$CHANGED_MODELS $MODEL"
+            fi
+          done
+
+          echo "changed_models=$CHANGED_MODELS" >> "$GITHUB_OUTPUT"
 
-      - name: Upload Models to S3 with SSE-KMS
+      - name: Upload updated models
+        if: steps.compare_versions.outputs.changed_models != ''
         run: |
-          # Variables
-          S3_BUCKET_NAME="${{ secrets.S3_BUCKET_NAME }}" # Use the secret for the bucket name
-          KMS_KEY_ID="${{ secrets.KMS_KEY_ID }}" # Use the secret for the KMS key ID
-          MODEL_FILES="${{ steps.find_models.outputs.model_files }}" # List of model files
+          CHANGED_MODELS="${{ steps.compare_versions.outputs.changed_models }}"
+          CURRENT_VERSIONS=$(cat model_versions.json)
+          S3_BUCKET="${{ secrets.S3_BUCKET_NAME }}"
+          KMS_KEY_ID="${{ secrets.KMS_KEY_ID }}"
 
-          # Loop over each model file
-          for MODEL_FILE in $MODEL_FILES
-          do
-            echo "Uploading model file to S3 with SSE-KMS: $MODEL_FILE"
+          for MODEL in $CHANGED_MODELS; do
+            CURRENT_VERSION=$(jq -r --arg model "$MODEL" '.[$model]' <<< "$CURRENT_VERSIONS")
+            MODEL_PATH="models/$MODEL"
+            S3_FILE="s3://$S3_BUCKET/${MODEL}-${CURRENT_VERSION}"
 
-            # Upload the model file directly to S3 with server-side encryption using KMS
-            aws s3 cp $MODEL_FILE s3://$S3_BUCKET_NAME/ --sse aws:kms --sse-kms-key-id $KMS_KEY_ID
+            echo "Uploading $MODEL (version $CURRENT_VERSION) to $S3_FILE"
 
-            echo "Model file uploaded successfully: $MODEL_FILE"
+            # Upload to S3 with SSE-KMS; testing the command directly keeps the
+            # step alive under bash -e if an individual upload fails, and only a
+            # successful upload deletes the file from the repo
+            if aws s3 cp "$MODEL_PATH" "$S3_FILE" --sse aws:kms --sse-kms-key-id "$KMS_KEY_ID"; then
+              echo "Model uploaded successfully, deleting $MODEL from the repository."
+              rm "$MODEL_PATH"
+            else
+              echo "Model upload failed for $MODEL."
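+              # Keep the local file so a future run can retry this upload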
+            fi
           done
+
+      - name: Commit model deletions
+        if: steps.compare_versions.outputs.changed_models != ''
+        run: |
+          git config --global user.name "github-actions"
+          git config --global user.email "github-actions@github.com"
+
+          # Stage deleted files and commit the changes
+          git add -u
+          git commit -m "Delete updated models from repo after S3 upload"
+          git push
diff --git a/.gitignore b/.gitignore
index 69da196..40087ad 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
 .DS_Store
 node_modules
-.env
\ No newline at end of file
+.env
+Model
\ No newline at end of file
diff --git a/models/antispofing.onnx b/frontend/models/antispofing.onnx
similarity index 100%
rename from models/antispofing.onnx
rename to frontend/models/antispofing.onnx
diff --git a/frontend/models/convit_tiny_Opset17.onnx b/frontend/models/convit_tiny_Opset17.onnx
new file mode 100644
index 0000000..58eaba5
Binary files /dev/null and b/frontend/models/convit_tiny_Opset17.onnx differ
diff --git a/medium_posts b/medium_posts
deleted file mode 160000
index ca13780..0000000
--- a/medium_posts
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit ca1378071400d61cec4c3bee0f7a8ab385abe54e
diff --git a/mern-backend/controllers/modelController.js b/mern-backend/controllers/modelController.js
index d068a8f..32f5323 100644
--- a/mern-backend/controllers/modelController.js
+++ b/mern-backend/controllers/modelController.js
@@ -1,42 +1,70 @@
-const { fetchEncryptedFilesFromS3 } = require("../utils/s3utils");
+const { fetchDecryptedModelsFromS3 } = require("../utils/s3utils");
 const { encryptModel } = require("../utils/encryptionUtils.js");
 const { generateModelHash, signModelHash } = require("../utils/hashUtils.js");
 const fs = require("fs");
 const path = require("path");
+const fsp = require("fs").promises; // Promise-based fs for async reads; plain fs stays for readFileSync below
+
 exports.getAllEncryptedModels = async (req, res, next) => {
   try {
-    const modelKey = "antispofing.onnx";
-
-    const { modelFile } = await fetchEncryptedFilesFromS3(
-      modelKey
-    );
-
-    console.log("Decrypted model file:", modelFile);
+    // Load the versioning file
+    const versionsFilePath = path.resolve(__dirname, "../model_versions.json");
+    const modelVersions = JSON.parse(await fsp.readFile(versionsFilePath, "utf-8"));
 
+    // Extract the public key from the request body
     const publicKeyBase64 = req.body.publicKey;
     if (!publicKeyBase64) {
       return res.status(400).json({ message: "Public key is required" });
     }
 
-    const { encryptedModel, encryptedAesKey, iv } = encryptModel(modelFile, publicKeyBase64);
+    // Prepare response for all models
+    const encryptedModels = await Promise.all(
+      Object.keys(modelVersions).map(async (modelName) => {
+        try {
+          // Fetch the decrypted model from S3; the object key follows the
+          // "<model>-<version>" naming used by the upload workflow
+          const { modelFile } = await fetchDecryptedModelsFromS3(`${modelName}-${modelVersions[modelName]}`);
+
+          console.log(`Decrypted model file for ${modelName}:`, modelFile);
+
+          // Encrypt the model
+          const { encryptedModel, encryptedAesKey, iv } = encryptModel(modelFile, publicKeyBase64);
+
+          // Generate and sign the model hash
+          const modelHash = await generateModelHash(modelFile);
+          console.log(`Model hash for ${modelName}:`, modelHash);
+          const signedHash = signModelHash(modelHash);
+          console.log(`Signed hash for ${modelName}:`, signedHash);
+
+          // Return encrypted model data
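+          // Buffers are base64-encoded so the binary payloads survive JSON serialization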
+          return {
+            modelName,
+            encryptedModel: encryptedModel.toString("base64"),
+            encryptedAesKey: encryptedAesKey.toString("base64"),
+            iv: iv.toString("base64"),
+            signedHash,
+            version: modelVersions[modelName],
+          };
+        } catch (error) {
+          console.error(`Error processing model ${modelName}:`, error);
+          return null; // Skip this model so the remaining models are still processed
+        }
+      })
+    );
 
-    const modelHash = await generateModelHash(modelFile); // Add await here
-    console.log("Backend Model hash:", modelHash);
-    const signedHash = signModelHash(modelHash);
-    console.log("Bakend Signed Model hash:", signedHash);
     res.status(200).json({
-      message: "Model encrypted and signed successfully",
-      encryptedModel: encryptedModel.toString("base64"),
-      encryptedAesKey: encryptedAesKey.toString("base64"),
-      iv: iv.toString("base64"),
-      signedHash: signedHash,
+      message: "Models encrypted and signed successfully",
+      encryptedModels: encryptedModels.filter(Boolean), // Drop models that failed to process
     });
   } catch (error) {
-    console.error("Error fetching and decrypting models:", error);
-    res.status(500).json({ error: "Failed to fetch and decrypt models." });
+    console.error("Error fetching and processing models:", error);
+    res.status(500).json({ error: "Failed to fetch and process models." });
   }
 };
 
 exports.getPublicVerificationKey = async (req, res, next) => {
   try {
     const publicKey = fs.readFileSync(path.join(__dirname, "../digital_signature_keys/public_key.pem"), "utf8");
diff --git a/mern-backend/utils/s3utils.js b/mern-backend/utils/s3utils.js
index 2a41fef..3ddbbe8 100644
--- a/mern-backend/utils/s3utils.js
+++ b/mern-backend/utils/s3utils.js
@@ -2,7 +2,7 @@
 const AWS = require("aws-sdk");
 const s3 = new AWS.S3();
 
-async function fetchEncryptedFilesFromS3(modelKey) {
+async function fetchDecryptedModelsFromS3(modelKey) {
   const bucketName = process.env.S3_BUCKET_NAME; // Ensure this is set in your environment
 
   try {
@@ -22,4 +22,4 @@
   }
 }
 
-module.exports = { fetchEncryptedFilesFromS3};
+module.exports = { fetchDecryptedModelsFromS3 };
diff --git a/model_versions.json b/model_versions.json
new file mode 100644
index 0000000..4820612
--- /dev/null
+++ b/model_versions.json
@@ -0,0 +1,4 @@
+{
+  "antispoofing.onnx": "1.0.0",
+  "face_recognition.onnx": "1.0.0"
+}
diff --git a/models/antispoofing.onnx b/models/antispoofing.onnx
new file mode 100644
index 0000000..67dfbaa
Binary files /dev/null and b/models/antispoofing.onnx differ
diff --git a/models/yolo-face-detection.onnx b/models/yolo-face-detection.onnx
new file mode 100644
index 0000000..5db81d9
Binary files /dev/null and b/models/yolo-face-detection.onnx differ