diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 00000000..f72d3b11
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,4 @@
+{
+  "editor.tabSize": 2,
+  "editor.indentSize": 2
+}
\ No newline at end of file
diff --git a/README.md b/README.md
index 24441abd..92e60486 100755
--- a/README.md
+++ b/README.md
@@ -329,15 +329,12 @@ resources:
 The unit tests in this module do not need a binding to the respective object stores, run them with `npm install`. To achieve a clean install, the command `rm -rf node_modules` should be used before installation.
 The integration tests need a binding to a real object store. Run them with `npm run test`.
-To set the binding, provide the following environment variables:
- AWS_S3_BUCKET
- AWS_S3_REGION
- AWS_S3_ACCESS_KEY_ID
- AWS_S3_SECRET_ACCESS_KEY
+To set the binding, please see the section [Storage Targets](#storage-targets).
-##### Supported Storage Backends
+##### Supported Storage Providers
 - **AWS S3**
+- **Azure Blob Storage**
 ### Model Texts
diff --git a/lib/aws-s3.js b/lib/aws-s3.js
index 47a27d3c..b7e5d9c2 100644
--- a/lib/aws-s3.js
+++ b/lib/aws-s3.js
@@ -36,8 +36,9 @@ module.exports = class AWSAttachmentsService extends require("./basic") {
     const creds = cds.env.requires?.objectStore?.credentials
     if (!creds) {
-      logConfig.configValidation('objectStore.credentials', creds, false,
-        'Bind an SAP Object Store instance to your application or configure separateObjectStore for multitenancy')
       throw new Error("SAP Object Store instance is not bound.")
     }
@@ -46,8 +47,9 @@ module.exports = class AWSAttachmentsService extends require("./basic") {
     const missingFields = requiredFields.filter(field => !creds[field])
     if (missingFields.length > 0) {
-      logConfig.configValidation('objectStore.credentials', creds, false,
-        `Object Store credentials missing: ${missingFields.join(', ')}`)
+      if (Object.keys(creds).includes('container_name')) {
+        throw new Error('Azure Blob Storage credentials found where AWS S3 credentials expected, please check your service bindings.')
+      }
       throw new Error(`Missing Object Store credentials: ${missingFields.join(', ')}`)
     }
@@ -101,31 +103,8 @@ module.exports = class AWSAttachmentsService extends require("./basic") {
       return
     }
-    // Validate Service Manager configuration
-    const serviceManagerCreds = cds.env.requires?.serviceManager?.credentials
-    if (!serviceManagerCreds) {
-      logConfig.configValidation('serviceManager.credentials', serviceManagerCreds, false,
-        'Bind a Service Manager instance for separate object store mode')
-      throw new Error("Service Manager Instance is not bound")
-    }
-    const { sm_url, url, clientid, clientsecret, certificate, key, certurl } = serviceManagerCreds
-    // Validate required Service Manager fields
-    const requiredSmFields = ['sm_url', 'url', 'clientid']
-    const missingSmFields = requiredSmFields.filter(field => !serviceManagerCreds[field])
-    if (missingSmFields.length > 0) {
-      logConfig.configValidation('serviceManager.credentials', serviceManagerCreds, false,
-        `Service Manager credentials missing: ${missingSmFields.join(', ')}`)
-      throw new Error(`Missing Service Manager credentials: ${missingSmFields.join(', ')}`)
-    }
-    logConfig.debug('Fetching access token for tenant', { tenantID, sm_url })
-    const token = await utils.fetchToken(url, clientid, clientsecret, certificate,
key, certurl) - logConfig.debug('Fetching object store credentials for tenant', { tenantID }) - const objectStoreCreds = await utils.getObjectStoreCredentials(tenantID, sm_url, token) + const objectStoreCreds = await utils.getObjectStoreCredentials(tenantID) if (!objectStoreCreds) { logConfig.withSuggestion('error', @@ -272,24 +251,21 @@ module.exports = class AWSAttachmentsService extends require("./basic") { }) // Initiate malware scan if configured - if (this.kind === 's3') { - logConfig.debug('Initiating malware scan for uploaded file', { - fileId: metadata.ID, - filename: metadata.filename - }) - - const scanRequestJob = cds.spawn(async () => { - await scanRequest(attachments, { ID: metadata.ID }) - }) + logConfig.debug('Initiating malware scan for uploaded file', { + fileId: metadata.ID, + filename: metadata.filename + }) - scanRequestJob.on('error', (err) => { - logConfig.withSuggestion('error', - 'Failed to initiate malware scan for attachment', err, - 'Check malware scanner configuration and connectivity', - { fileId: metadata.ID, filename: metadata.filename, errorMessage: err.message }) - }) - } + const scanRequestJob = cds.spawn(async () => { + await scanRequest(attachments, { ID: metadata.ID }) + }) + scanRequestJob.on('error', (err) => { + logConfig.withSuggestion('error', + 'Failed to initiate malware scan for attachment', err, + 'Check malware scanner configuration and connectivity', + { fileId: metadata.ID, filename: metadata.filename, errorMessage: err.message }) + }) } catch (err) { const duration = Date.now() - startTime logConfig.withSuggestion('error', @@ -385,7 +361,7 @@ module.exports = class AWSAttachmentsService extends require("./basic") { * @param {import('express').NextFunction} next - The next middleware function */ async updateContentHandler(req, next) { - logConfig.debug(`[S3 Upload] Uploading file using updateContentHandler for ${req.target.name}`) + logConfig.debug(`[AWS S3] Uploading file using updateContentHandler for ${req.target.name}`) // Check separate object store instances if (separateObjectStore) { @@ -393,13 +369,13 @@ module.exports = class AWSAttachmentsService extends require("./basic") { await this.createClientS3(tenantID) } - const targetID = req.data.ID || req.params[1]?.ID || req.params[1]; + const targetID = req.data.ID || req.params[1]?.ID || req.params[1] if (!targetID) { - req.reject(400, "Missing ID in request"); + req.reject(400, "Missing ID in request") } if (req?.data?.content) { - const response = await SELECT.from(req.target, { ID: targetID }).columns("url"); + const response = await SELECT.from(req.target, { ID: targetID }).columns("url") if (response?.url) { const Key = response.url const input = { @@ -416,7 +392,7 @@ module.exports = class AWSAttachmentsService extends require("./basic") { const keys = { ID: targetID } const scanRequestJob = cds.spawn(async () => { - await scanRequest(req.target, keys, req) + await scanRequest(req.target, keys) }) scanRequestJob.on('error', async (err) => { @@ -426,12 +402,12 @@ module.exports = class AWSAttachmentsService extends require("./basic") { { keys, errorMessage: err.message }) }) - logConfig.debug(`[S3 Upload] Uploaded file using updateContentHandler for ${req.target.name}`) + logConfig.debug(`[AWS S3] Uploaded file using updateContentHandler for ${req.target.name}`) } } else if (req?.data?.note) { const key = { ID: targetID } await super.update(req.target, key, { note: req.data.note }) - logConfig.debug(`[S3 Upload] Updated file upload with note for ${req.target.name}`) + 
logConfig.debug(`[AWS S3] Updated file upload with note for ${req.target.name}`)
     } else {
       next()
     }
   }
@@ -488,7 +464,7 @@ module.exports = class AWSAttachmentsService extends require("./basic") {
    */
   async delete(Key) {
     const tenantID = cds.context.tenant
-    logConfig.debug(`[S3 Upload] Executing delete for file ${Key} in bucket ${this.bucket}`)
+    logConfig.debug(`[AWS S3] Executing delete for file ${Key} in bucket ${this.bucket}`)
     // Check separate object store instances
     if (separateObjectStore) {
@@ -505,6 +481,7 @@ module.exports = class AWSAttachmentsService extends require("./basic") {
   }
 
   /**
+   * Requires implementation as delete of infected attachment is specific to storage service
    * @inheritdoc
    */
   async deleteInfectedAttachment(Attachments, key) {
diff --git a/lib/azure-blob-storage.js b/lib/azure-blob-storage.js
new file mode 100644
index 00000000..d9eb412e
--- /dev/null
+++ b/lib/azure-blob-storage.js
@@ -0,0 +1,456 @@
+const { BlobServiceClient } = require('@azure/storage-blob')
+const { scanRequest } = require('./malwareScanner')
+const cds = require("@sap/cds")
+const utils = require('./helper')
+const { SELECT } = cds.ql
+const { logConfig } = require('./logger')
+
+const isMultitenancyEnabled = !!cds.env.requires.multitenancy
+const objectStoreKind = cds.env.requires?.attachments?.objectStore?.kind
+const separateObjectStore = isMultitenancyEnabled && objectStoreKind === "separate"
+
+const azureClientsCache = {}
+module.exports = class AzureAttachmentsService extends require("./basic") {
+  /**
+   * Initializes the Azure Blob Storage Attachments Service
+   */
+  init() {
+    // Log initial configuration
+    logConfig.info('Azure Blob Storage Attachments Service initialization', {
+      multitenancy: isMultitenancyEnabled,
+      objectStoreKind,
+      separateObjectStore,
+      attachmentsConfig: {
+        kind: cds.env.requires?.attachments?.kind,
+        scan: cds.env.requires?.attachments?.scan
+      }
+    })
+
+    logConfig.processStep('Initializing Azure Blob Storage Attachments Service', {
+      separateObjectStore
+    })
+
+    // For single tenant or shared object store instance
+    if (!separateObjectStore) {
+      const creds = cds.env.requires?.objectStore?.credentials
+
+      if (!creds) {
+        throw new Error("SAP Object Store instance is not bound.")
+      }
+      if (Object.keys(creds).includes('bucket')) {
+        throw new Error('AWS S3 credentials found where Azure Blob Storage credentials expected, please check your service bindings.')
+      }
+
+      // Validate required credentials
+      const requiredFields = ['container_name', 'container_uri', 'sas_token']
+      const missingFields = requiredFields.filter(field => !creds[field])
+
+      if (missingFields.length > 0) {
+        logConfig.configValidation('objectStore.credentials', creds, false,
+          `Azure Blob Storage credentials missing: ${missingFields.join(', ')}`)
+        throw new Error(`Missing Azure Blob Storage credentials: ${missingFields.join(', ')}`)
+      }
+
+      logConfig.info('Configuring shared Azure Blob Storage client', {
+        containerName: creds.container_name,
+        containerUri: creds.container_uri,
+        hasSasToken: !!creds.sas_token
+      })
+
+      this.containerName = creds.container_name
+      this.blobServiceClient = new BlobServiceClient(`${creds.container_uri}?${creds.sas_token}`)
+      this.containerClient = this.blobServiceClient.getContainerClient(creds.container_name)
+
+      logConfig.info('Azure Blob Storage client initialized successfully', {
+        containerName: this.containerName
+      })
+
+      return super.init()
+    } else {
+      logConfig.info('Separate object store mode enabled - clients will be created per tenant')
+    }
+
+
this.on('DeleteAttachment', async msg => { + await this.delete(msg.url); + }); + } + + /** + * Creates or retrieves a cached Azure Blob Storage client for the given tenant + * @param {String} tenantID - The tenant ID for which to create/retrieve the client + */ + async createAzureClient(tenantID) { + logConfig.processStep('Creating tenant-specific Azure Blob Storage client', { tenantID }) + + try { + // Check cache first + if (azureClientsCache[tenantID]) { + logConfig.debug('Using cached Azure Blob Storage client', { + tenantID, + containerName: azureClientsCache[tenantID].containerName + }) + this.blobServiceClient = azureClientsCache[tenantID].blobServiceClient + this.containerClient = azureClientsCache[tenantID].containerClient + this.containerName = azureClientsCache[tenantID].containerName + return + } + + logConfig.debug('Fetching object store credentials for tenant', { tenantID }) + const objectStoreCreds = await utils.getObjectStoreCredentials(tenantID) + + if (!objectStoreCreds) { + logConfig.withSuggestion('error', + 'Object store credentials not found for tenant', null, + 'Ensure Azure Blob Storage instance is subscribed and bound for this tenant', + { tenantID }) + throw new Error(`Azure Blob Storage instance not bound for tenant ${tenantID}`) + } + + // Validate object store credentials + const requiredOsFields = ['container_name', 'container_uri', 'sas_token'] + const missingOsFields = requiredOsFields.filter(field => !objectStoreCreds.credentials?.[field]) + + if (missingOsFields.length > 0) { + logConfig.withSuggestion('error', + 'Object store credentials incomplete', null, + 'Check Azure Blob Storage instance configuration and binding', + { tenantID, missingFields: missingOsFields }) + throw new Error(`Incomplete Azure Blob Storage credentials: ${missingOsFields.join(', ')}`) + } + + logConfig.debug('Creating Azure Blob Storage client for tenant', { + tenantID, + containerName: objectStoreCreds.credentials.container_name + }) + + const creds = objectStoreCreds.credentials + const blobServiceClient = new BlobServiceClient(creds.container_uri + "?" 
+ creds.sas_token) + const containerClient = blobServiceClient.getContainerClient(creds.container_name) + + azureClientsCache[tenantID] = { + blobServiceClient, + containerClient, + containerName: creds.container_name, + } + + this.blobServiceClient = azureClientsCache[tenantID].blobServiceClient + this.containerClient = azureClientsCache[tenantID].containerClient + this.containerName = azureClientsCache[tenantID].containerName + + logConfig.debug('Azure Blob Storage client has been created successful', { + tenantID, + containerName: this.containerName + }) + + } catch (error) { + logConfig.withSuggestion('error', + 'Failed to create tenant-specific Azure Blob Storage client', error, + 'Check Service Manager and Azure Blob Storage instance configuration', + { tenantID }) + throw error + } + } + + /** + * @inheritdoc + */ + async put(attachments, data, isDraftEnabled, _content, req) { + const startTime = Date.now() + + logConfig.processStep('Starting file upload to Azure Blob Storage', { + attachmentEntity: attachments.name, + isDraftEnabled, + tenant: req?.tenant + }) + + try { + // Check separate object store instances + if (separateObjectStore) { + const tenantID = cds.context.tenant + if (!tenantID) { + logConfig.withSuggestion('error', + 'Tenant ID required for separate object store mode', null, + 'Ensure request context includes tenant information', + { separateObjectStore, hasTenant: !!tenantID }) + throw new Error('Tenant ID required for separate object store') + } + await this.createAzureClient(tenantID) + } + + if (Array.isArray(data)) { + logConfig.debug('Processing bulk file upload', { + fileCount: data.length, + filenames: data.map(d => d.filename) + }) + return Promise.all( + data.map((d) => this.put(attachments, d, isDraftEnabled, _content, req)) + ) + } + + const { content = _content, ...metadata } = data + const blobName = metadata.url + + if (!blobName) { + logConfig.withSuggestion('error', + 'File key/URL is required for Azure Blob Storage upload', null, + 'Ensure attachment data includes a valid URL/key', + { metadata: { ...metadata, content: !!content } }) + throw new Error('File key is required for upload') + } + + if (!content) { + logConfig.withSuggestion('error', + 'File content is required for Azure Blob Storage upload', null, + 'Ensure attachment data includes file content', + { key: blobName, hasContent: !!content }) + throw new Error('File content is required for upload') + } + + const blobClient = this.containerClient.getBlockBlobClient(blobName) + + logConfig.debug('Uploading file to Azure Blob Storage', { + containerName: this.containerName, + blobName, + filename: metadata.filename, + contentSize: content.length || content.size || 'unknown' + }) + + const stored = super.put(attachments, metadata, null, isDraftEnabled) + await Promise.all([stored, blobClient.uploadData(content)]) + + const duration = Date.now() - startTime + logConfig.debug('File upload to Azure Blob Storage completed successfully', { + filename: metadata.filename, + fileId: metadata.ID, + containerName: this.containerName, + blobName, + duration + }) + + // Initiate malware scan if configured + logConfig.debug('Initiating malware scan for uploaded file', { + fileId: metadata.ID, + filename: metadata.filename + }) + + const scanRequestJob = cds.spawn(async () => { + await scanRequest(attachments, { ID: metadata.ID }) + }) + + scanRequestJob.on('error', (err) => { + logConfig.withSuggestion('error', + 'Failed to initiate malware scan for attachment', err, + 'Check malware scanner configuration 
and connectivity', + { fileId: metadata.ID, filename: metadata.filename, errorMessage: err.message }) + }) + } catch (err) { + const duration = Date.now() - startTime + logConfig.withSuggestion('error', + 'File upload to Azure Blob Storage failed', err, + 'Check Azure Blob Storage connectivity, credentials, and container permissions', + { filename: data?.filename, fileId: data?.ID, containerName: this.containerName, blobName: data?.url, duration }) + throw err + } + } + + /** + * @inheritdoc + */ + async get(attachments, keys) { + const startTime = Date.now() + + const tenantID = cds.context.tenant + + logConfig.processStep('Starting stream from Azure Blob Storage', { + attachmentEntity: attachments.name, + keys, + tenant: tenantID + }) + + try { + // Check separate object store instances + if (separateObjectStore) { + if (!tenantID) { + logConfig.withSuggestion('error', + 'Tenant ID required for separate object store mode', null, + 'Ensure request context includes tenant information', + { separateObjectStore, hasTenant: !!tenantID }) + throw new Error('Tenant ID required for separate object store') + } + await this.createAzureClient(tenantID) + } + + logConfig.debug('Fetching attachment metadata', { keys }) + const response = await SELECT.from(attachments, keys).columns("url") + + if (!response?.url) { + logConfig.withSuggestion('warn', + 'File URL not found in database', null, + 'Check if the attachment exists and has been properly uploaded', + { keys, hasResponse: !!response }) + return null + } + + const blobName = response.url + + logConfig.debug('Streaming file from Azure Blob Storage', { + containerName: this.containerName, + blobName + }) + + const blobClient = this.containerClient.getBlockBlobClient(blobName) + const downloadResponse = await blobClient.download() + + const duration = Date.now() - startTime + logConfig.debug('File streamed from Azure Blob Storage successfully', { + fileId: keys.ID, + containerName: this.containerName, + blobName, + duration + }) + + return downloadResponse.readableStreamBody + + } catch (error) { + const duration = Date.now() - startTime + const suggestion = error.code === 'BlobNotFound' ? + 'File may have been deleted from Azure Blob Storage or URL is incorrect' : + error.code === 'AuthenticationFailed' ? 
+ 'Check Azure Blob Storage credentials and SAS token' : + 'Check Azure Blob Storage connectivity and configuration' + + logConfig.withSuggestion('error', + 'File download from Azure Blob Storage failed', error, + suggestion, + { fileId: keys?.ID, containerName: this.containerName, attachmentName: attachments.name, duration }) + + throw error + } + } + + /** + * Registers attachment handlers for the given service and entity + * @param {import('@sap/cds').Request} req - The request object + * @param {import('express').NextFunction} next - The next middleware function + */ + async updateContentHandler(req, next) { + logConfig.debug(`[Azure] Uploading file using updateContentHandler for ${req.target.name}`) + // Check separate object store instances + if (separateObjectStore) { + const tenantID = cds.context.tenant + await this.createAzureClient(tenantID) + } + + const targetID = req.data.ID || req.params[1]?.ID || req.params[1]; + if (!targetID) { + req.reject(400, "Missing ID in request"); + } + + if (req?.data?.content) { + const response = await SELECT.from(req.target, { ID: targetID }).columns("url") + if (response?.url) { + const blobName = response.url + const blobClient = this.containerClient.getBlockBlobClient(blobName) + + // Handle different content types for update + let contentLength + const content = req.data.content + if (Buffer.isBuffer(content)) { + contentLength = content.length + } else if (content && typeof content.length === 'number') { + contentLength = content.length + } else if (content && typeof content.size === 'number') { + contentLength = content.size + } else { + // Convert to buffer if needed + const chunks = [] + for await (const chunk of content) { + chunks.push(chunk) + } + req.data.content = Buffer.concat(chunks) + contentLength = req.data.content.length + } + + await blobClient.upload(req.data.content, contentLength) + + const keys = { ID: targetID } + + const scanRequestJob = cds.spawn(async () => { + await scanRequest(req.target, keys) + }) + + scanRequestJob.on('error', async (err) => { + logConfig.withSuggestion('error', + 'Failed to initiate malware scan for attachment', err, + 'Check malware scanner configuration and connectivity', + { keys, errorMessage: err.message }) + }) + + logConfig.debug(`[Azure] Uploaded file using updateContentHandler for ${req.target.name}`) + } + } else if (req?.data?.note) { + const key = { ID: targetID } + await super.update(req.target, key, { note: req.data.note }) + logConfig.debug(`[Azure] Updated file upload with note for ${req.target.name}`) + } else { + next() + } + } + + /** + * @inheritdoc + */ + registerDraftUpdateHandlers(srv, mediaElements) { + for (const mediaElement of mediaElements) { + srv.prepend(() => { + if (mediaElement.drafts) { + srv.on( + "PUT", + mediaElement.drafts, + this.updateContentHandler.bind(this) + ) + + // case: attachments uploaded in draft and deleted before saving + srv.before( + "DELETE", + mediaElement.drafts, + this.attachDraftDeletionData.bind(this) + ) + srv.after( + "DELETE", + mediaElement.drafts, + this.deleteAttachmentsWithKeys.bind(this) + ) + } + }) + } + } + + /** + * Deletes a file from Azure Blob Storage + * @param {string} Key - The key of the file to delete + * @returns {Promise} - Promise resolving when deletion is complete + */ + async delete(blobName) { + const tenantID = cds.context.tenant + logConfig.debug(`[Azure] Executing delete for file ${blobName} in bucket ${this.containerName}`) + + // Check separate object store instances + if (separateObjectStore) { + await 
this.createAzureClient(tenantID) + } + + const blobClient = this.containerClient.getBlockBlobClient(blobName) + const response = await blobClient.delete() + return response._response.status === 202 + } + + /** + * Requires implementation as delete of infected attachment is specific to storage service + * @inheritdoc + */ + async deleteInfectedAttachment(Attachments, key) { + const response = await SELECT.from(Attachments, key).columns('url') + return await this.delete(response.url) + } +} diff --git a/lib/helper.js b/lib/helper.js index e879a7ce..4ecd85e4 100644 --- a/lib/helper.js +++ b/lib/helper.js @@ -1,53 +1,117 @@ const axios = require('axios') const https = require("https") const { logConfig } = require('./logger') +const cds = require('@sap/cds') + +/** + * Validates the presence of required Service Manager credentials + * @param {*} serviceManagerCreds - Service Manager credentials object + * @throws Will throw an error if validation fails + */ +function validateServiceManagerCredentials(serviceManagerCreds) { + if (!serviceManagerCreds) { + logConfig.configValidation('serviceManager.credentials', serviceManagerCreds, false, + 'Bind a Service Manager instance for separate object store mode') + throw new Error("Service Manager Instance is not bound") + } -async function getObjectStoreCredentials(tenantID, sm_url, token) { - logConfig.processStep('Fetching object store credentials', { tenantID, sm_url }) + const requiredSmFields = ['sm_url', 'url', 'clientid'] + const missingSmFields = requiredSmFields.filter(field => !serviceManagerCreds[field]) + + if (missingSmFields.length > 0) { + logConfig.configValidation('serviceManager.credentials', serviceManagerCreds, false, + `Service Manager credentials missing: ${missingSmFields.join(', ')}`) + throw new Error(`Missing Service Manager credentials: ${missingSmFields.join(', ')}`) + } +} - // Validate inputs +/** + * Validates the inputs required for fetching object store credentials + * @param {string} tenantID - Tenant ID + * @param {string} sm_url - Service Manager URL + * @param {string} token - Access token + * @returns + */ +function validateInputs(tenantID, sm_url, token) { if (!tenantID) { logConfig.withSuggestion('error', 'Tenant ID is required for object store credentials', null, 'Ensure multitenancy is properly configured and tenant context is available', { tenantID }) - return null + return false } if (!sm_url) { logConfig.configValidation('serviceManager.credentials.sm_url', sm_url, false, 'Bind a Service Manager instance to your application') - return null + return false } if (!token) { logConfig.withSuggestion('error', 'Access token is required for Service Manager API', null, 'Check if token fetching completed successfully', { hasToken: !!token }) + return false + } + + return true +} + +/** + * Fetches object store service binding from Service Manager + * @param {string} sm_url - Service Manager URL + * @param {string} tenantID - Tenant ID + * @param {string} token - Access token + * @returns {Promise} - Promise resolving to array of service bindings + */ +async function fetchObjectStoreBinding(sm_url, tenantID, token) { + logConfig.debug('Making Service Manager API call', { + tenantID, + endpoint: `${sm_url}/v1/service_bindings`, + labelQuery: `service eq 'OBJECT_STORE' and tenant_id eq '${tenantID}'` + }) + + const response = await axios.get(`${sm_url}/v1/service_bindings`, { + params: { labelQuery: `service eq 'OBJECT_STORE' and tenant_id eq '${tenantID}'` }, + headers: { + 'Accept': 'application/json', + 
'Authorization': `Bearer ${token}`, + 'Content-Type': 'application/json' + } + }) + + return response.data?.items || [] +} + +/** + * Retrieves object store credentials for a given tenant + * @param {*} tenantID - Tenant ID + * @returns {Promise} - Promise resolving to object store credentials or null + */ +async function getObjectStoreCredentials(tenantID) { + const serviceManagerCreds = cds.env.requires?.serviceManager?.credentials + + validateServiceManagerCredentials(serviceManagerCreds) + + const { sm_url, url, clientid, clientsecret, certificate, key, certurl } = serviceManagerCreds + + logConfig.debug('Fetching access token for tenant', { tenantID, sm_url: sm_url }) + const token = await fetchToken(url, clientid, clientsecret, certificate, key, certurl) + + logConfig.processStep('Fetching object store credentials', { tenantID, sm_url }) + + if (!validateInputs(tenantID, sm_url, token)) { return null } try { - logConfig.debug('Making Service Manager API call', { - tenantID, - endpoint: `${sm_url}/v1/service_bindings`, - labelQuery: `service eq 'OBJECT_STORE' and tenant_id eq '${tenantID}'` - }) - - const response = await axios.get(`${sm_url}/v1/service_bindings`, { - params: { labelQuery: `service eq 'OBJECT_STORE' and tenant_id eq '${tenantID}'` }, - headers: { - 'Accept': 'application/json', - 'Authorization': `Bearer ${token}`, - 'Content-Type': 'application/json' - } - }) + const items = await fetchObjectStoreBinding(sm_url, tenantID, token) - if (!response.data?.items?.length) { + if (!items.length) { logConfig.withSuggestion('error', `No object store service binding found for tenant`, null, 'Ensure an Object Store instance is subscribed and bound for this tenant', - { tenantID, itemsFound: response.data?.items?.length || 0 }) + { tenantID, itemsFound: 0 }) return null } - const credentials = response.data.items[0] + const credentials = items[0] logConfig.info('Object store credentials retrieved successfully', { tenantID, hasCredentials: !!credentials, @@ -72,6 +136,16 @@ async function getObjectStoreCredentials(tenantID, sm_url, token) { } } +/** + * Fetches an OAuth token using either client credentials or MTLS + * @param {string} url - Token endpoint URL + * @param {string} clientid - Client ID + * @param {string} clientsecret - Client Secret + * @param {string} certificate - MTLS Certificate + * @param {string} key - MTLS Key + * @param {string} certURL - MTLS Certificate URL + * @returns {Promise} - Promise resolving to access token + */ async function fetchToken(url, clientid, clientsecret, certificate, key, certURL) { logConfig.processStep('Determining token fetch method', { hasClientCredentials: !!(clientid && clientsecret), @@ -106,6 +180,13 @@ async function fetchToken(url, clientid, clientsecret, certificate, key, certURL } } +/** + * Fetches OAuth token using client credentials flow + * @param {string} url - Token endpoint URL + * @param {string} clientid - Client ID + * @param {string} clientsecret - Client Secret + * @returns + */ async function fetchTokenWithClientSecret(url, clientid, clientsecret) { const startTime = Date.now() @@ -130,7 +211,7 @@ async function fetchTokenWithClientSecret(url, clientid, clientsecret) { }) const duration = Date.now() - startTime - logConfig.debug('OAuth token fetched successfully', { clientid, duration, tokenType: response.data.token_type }) + logConfig.debug('OAuth token fetched successfully', { clientid, duration, tokenType: response.data?.token_type }) return response.data.access_token } catch (error) { @@ -150,6 +231,14 @@ 
async function fetchTokenWithClientSecret(url, clientid, clientsecret) { } } +/** + * Fetches OAuth token using MTLS authentication + * @param {string} certURL - Certificate URL + * @param {string} clientid - Client ID + * @param {string} certificate - MTLS Certificate + * @param {string} key - MTLS Key + * @returns {Promise} - Promise resolving to access token + */ async function fetchTokenWithMTLS(certURL, clientid, certificate, key) { const startTime = Date.now() diff --git a/lib/malwareScanner.js b/lib/malwareScanner.js index 2bccd331..d612707a 100644 --- a/lib/malwareScanner.js +++ b/lib/malwareScanner.js @@ -2,7 +2,7 @@ const cds = require('@sap/cds') const { SELECT } = cds.ql const { logConfig } = require('./logger') -async function scanRequest(Attachments, key, req) { +async function scanRequest(Attachments, key) { logConfig.processStep('Initiating malware scan request', { attachmentEntity: Attachments.name, fileId: key.ID @@ -137,7 +137,7 @@ async function scanRequest(Attachments, key, req) { fileId: key.ID, entity: currEntity.name }) - await AttachmentsSrv.deleteInfectedAttachment(currEntity, key, req) + await AttachmentsSrv.deleteInfectedAttachment(currEntity, key) } else if (status === "Clean") { logConfig.info('Malware scan completed - file is clean', { fileId: key.ID, diff --git a/lib/mtx/server.js b/lib/mtx/server.js index f5eb010e..45079029 100644 --- a/lib/mtx/server.js +++ b/lib/mtx/server.js @@ -3,6 +3,7 @@ const axios = require('axios') const https = require("https") const { logConfig } = require('../logger') const { S3Client, paginateListObjectsV2, DeleteObjectsCommand } = require('@aws-sdk/client-s3') +const { BlobServiceClient } = require('@azure/storage-blob') const PATH = { SERVICE_INSTANCE: "v1/service_instances", @@ -465,43 +466,15 @@ cds.on('listening', async () => { const creds = cds.env.requires?.objectStore?.credentials if (!creds) throw new Error("SAP Object Store instance credentials not found.") - const client = new S3Client({ - region: creds.region, - credentials: { - accessKeyId: creds.access_key_id, - secretAccessKey: creds.secret_access_key, - }, - }) - - const bucket = creds.bucket - const keysToDelete = [] - - try { - const paginator = paginateListObjectsV2({ client }, { - Bucket: bucket, - Prefix: tenant, - }) - - for await (const page of paginator) { - page.Contents?.forEach(obj => { - keysToDelete.push({ Key: obj.Key }) - }) - } - - if (keysToDelete.length > 0) { - await client.send(new DeleteObjectsCommand({ - Bucket: bucket, - Delete: { Objects: keysToDelete }, - })) - logConfig.debug('S3 objects deleted for tenant', { tenant, deletedCount: keysToDelete.length }) - } else { - logConfig.debug('No S3 objects found for tenant during cleanup', { tenant }) - } - } catch (error) { - logConfig.withSuggestion('error', - `Failed to clean up S3 objects for tenant "${tenant}"`, error, - 'Check AWS S3 connectivity and permissions', - { tenant }) + switch (cds.env.requires?.attachments?.kind) { + case "s3": + await _cleanupAWSS3Objects(creds, tenant) + break + case "azure": + await _cleanupAzureBlobObjects(creds, tenant) + break + default: + logConfig.warn('Unsupported object store kind for cleanup', { kind: cds.env.requires?.attachments?.kind, tenant }) } }) @@ -510,6 +483,81 @@ cds.on('listening', async () => { module.exports = cds.server }) +/** + * Cleanup for AWS S3 objects for a given tenant + * @param {*} creds - AWS S3 credentials + * @param {string} tenant - Tenant ID + */ +const _cleanupAWSS3Objects = async (creds, tenant) => { + const client = new 
S3Client({ + region: creds.region, + credentials: { + accessKeyId: creds.access_key_id, + secretAccessKey: creds.secret_access_key, + }, + }) + + const bucket = creds.bucket + const keysToDelete = [] + + try { + const paginator = paginateListObjectsV2({ client }, { + Bucket: bucket, + Prefix: tenant, + }) + + for await (const page of paginator) { + page.Contents?.forEach(obj => { + keysToDelete.push({ Key: obj.Key }) + }) + } + + if (keysToDelete.length > 0) { + await client.send(new DeleteObjectsCommand({ + Bucket: bucket, + Delete: { Objects: keysToDelete }, + })) + logConfig.debug('[AWS S3] S3 objects deleted for tenant', { tenant, deletedCount: keysToDelete.length }) + } else { + logConfig.debug('[AWS S3] No S3 objects found for tenant during cleanup', { tenant }) + } + } catch (error) { + logConfig.withSuggestion('error', + `Failed to clean up S3 objects for tenant "${tenant}"`, error, + 'Check AWS S3 connectivity and permissions', + { tenant }) + } +} + +/** + * Cleanup for Azure Blob Storage objects for a given tenant + * @param {*} creds - Azure Blob Storage credentials + * @param {string} tenant - Tenant ID + */ +const _cleanupAzureBlobObjects = async (creds, tenant) => { + const blobServiceClient = new BlobServiceClient(`${creds.container_uri}?${creds.sas_token}`) + const containerClient = blobServiceClient.getContainerClient(creds.container) + + try { + const blobsToDelete = [] + for await (const blob of containerClient.listBlobsFlat({ prefix: tenant })) { + blobsToDelete.push(blob.name) + } + + for (const blobName of blobsToDelete) { + const blockBlobClient = containerClient.getBlockBlobClient(blobName) + await blockBlobClient.delete() + } + + logConfig.debug('[Azure] Azure Blob objects deleted for tenant', { tenant, deletedCount: blobsToDelete.length }) + } catch (error) { + logConfig.withSuggestion('error', + `Failed to clean up Azure Blob objects for tenant "${tenant}"`, error, + 'Check Azure Blob Storage connectivity and permissions', + { tenant }) + } +} + module.exports = { _fetchToken, _validateSMCredentials, diff --git a/package.json b/package.json index 20bab401..49b40ee2 100644 --- a/package.json +++ b/package.json @@ -18,8 +18,9 @@ "test": "npx jest --runInBand" }, "dependencies": { - "@aws-sdk/client-s3": "^3.400.0", - "@aws-sdk/lib-storage": "^3.515.0", + "@aws-sdk/client-s3": "^3.918.0", + "@aws-sdk/lib-storage": "^3.918.0", + "@azure/storage-blob": "^12.29.1", "axios": "^1.4.0" }, "devDependencies": { @@ -47,6 +48,9 @@ }, "attachments-s3": { "impl": "@cap-js/attachments/lib/aws-s3" + }, + "attachments-azure": { + "impl": "@cap-js/attachments/lib/azure-blob-storage" } }, "serviceManager": { diff --git a/tests/unit/unitTests.test.js b/tests/unit/unitTests.test.js index 64029739..9b951edc 100644 --- a/tests/unit/unitTests.test.js +++ b/tests/unit/unitTests.test.js @@ -1,4 +1,4 @@ -let mockAttachmentsSrv, key, req = {} +let mockAttachmentsSrv, key = {} jest.mock('@sap/cds', () => ({ ql: { UPDATE: jest.fn(() => ({ with: jest.fn() })) }, @@ -72,7 +72,6 @@ beforeEach(() => { status: 200 })) key = { ID: 'test-id' } - req = {} }) describe('scanRequest', () => { @@ -82,7 +81,7 @@ describe('scanRequest', () => { json: () => Promise.resolve({ malwareDetected: false }) }) ) - await scanRequest({ name: 'Attachments' }, key, req) + await scanRequest({ name: 'Attachments' }, key) expect(mockAttachmentsSrv.update).toHaveBeenCalled() expect(mockAttachmentsSrv.deleteInfectedAttachment).not.toHaveBeenCalled() }) @@ -94,14 +93,14 @@ describe('scanRequest', () => { status: 200 }) ) - 
await scanRequest({ name: 'Attachments' }, key, req)
+    await scanRequest({ name: 'Attachments' }, key)
     expect(mockAttachmentsSrv.deleteInfectedAttachment).toHaveBeenCalled()
     expect(mockAttachmentsSrv.update).toHaveBeenCalled()
   })
   it('should update status to "Failed" if fetch throws', async () => {
     global.fetch = jest.fn(() => { throw new Error('Network error') })
-    await scanRequest({ name: 'Attachments' }, key, req)
+    await scanRequest({ name: 'Attachments' }, key)
     expect(mockAttachmentsSrv.update).toHaveBeenCalledWith(expect.anything(), key, { status: 'Failed' })
   })
@@ -122,30 +121,42 @@ describe('scanRequest', () => {
 describe('getObjectStoreCredentials', () => {
   it('should return credentials from service manager', async () => {
+    cds.env.requires.serviceManager = {
+      credentials: {
+        sm_url: 'https://sm.example.com',
+        url: 'https://token.example.com',
+        clientid: 'client-id',
+        clientsecret: 'client-secret'
+      }
+    }
+
     axios.get.mockResolvedValue({ data: { items: [{ id: 'test-cred' }] } })
-    const creds = await getObjectStoreCredentials('tenant', 'url', 'token')
+    axios.post.mockResolvedValue({ data: { access_token: 'test-token' } })
+
+    const creds = await getObjectStoreCredentials('tenant')
     expect(creds.id).toBe('test-cred')
   })
   it('should return null when tenant ID is missing', async () => {
-    const creds = await getObjectStoreCredentials(null, 'url', 'token')
-    expect(creds).toBeNull()
-  })
-
-  it('should return null when sm_url is missing', async () => {
-    const creds = await getObjectStoreCredentials('tenant', null, 'token')
-    expect(creds).toBeNull()
-  })
+    cds.env.requires.serviceManager = {
+      credentials: {
+        sm_url: 'https://sm.example.com',
+        url: 'https://token.example.com',
+        clientid: 'client-id',
+        clientsecret: 'client-secret'
+      }
+    }
-  it('should return null when token is missing', async () => {
-    const creds = await getObjectStoreCredentials('tenant', 'url', null)
+    const creds = await getObjectStoreCredentials(null)
     expect(creds).toBeNull()
   })
-  it('should handle error gracefully and return null', async () => {
-    axios.get.mockRejectedValue(new Error('fail'))
-    const creds = await getObjectStoreCredentials('tenant', 'url', 'token')
-    expect(creds).toBeNull()
+  it('should throw error if credentials are missing', async () => {
+    delete cds.env.requires.serviceManager
+    await expect(getObjectStoreCredentials('tenant')).rejects.toThrow('Service Manager Instance is not bound')
   })
 })