diff --git a/README.md b/README.md
index 5c3527a3..145648af 100755
--- a/README.md
+++ b/README.md
@@ -126,6 +126,14 @@ async main() {
 
 Assuming `test.png` is 1,800,000 bytes this will upload the first 1,000,000 bytes to `http://my.server.com/test.png.1` and the next 800,000 bytes to `http://my.server.com/test.png.2`.
 
+## Debugging
+To enable debug output when using the `node-httptransfer` library, set the `DEBUG` environment variable to `httptransfer:*`.
+You can also restrict output to a specific log level defined in [./lib/logger.js](./lib/logger.js), e.g.:
+
+```bash
+$ DEBUG='httptransfer:warn' npm run test
+```
+
 ## Testbed
 
 A CLI tool [testbed](./testbed/index.js) is provided to try out the `node-httptransfer` functionality. It supports uploading, downloading, and transferring file content. It also supports Azure Blob stores through Shared Access Signature (SAS) urls created on the fly.
diff --git a/lib/aem/createassetservletrequestgenerator.js b/lib/aem/createassetservletrequestgenerator.js
index 18545fe2..d79a83dc 100644
--- a/lib/aem/createassetservletrequestgenerator.js
+++ b/lib/aem/createassetservletrequestgenerator.js
@@ -15,6 +15,7 @@
 const FormData = require("form-data");
 const { HttpRequestGenerator } = require("../asset/httprequestgenerator");
 const { HTTP } = require("../constants");
+const logger = require("../logger");
 
 const FILE_OFFSET = "file@Offset";
 const CHUNK_LENGTH = "chunk@Length";
@@ -38,7 +39,7 @@ class CreateAssetServletRequestGenerator extends HttpRequestGenerator {
         if (this.isPartChunked(transferPart.totalSize, contentRange)) {
             // this is a chunk of the file - add more information to form
             const { low, length } = contentRange || {};
-            console.log(`Create asset servlet upload process adding form elements file@Offset=${low}, chunk@Length=${length}, file@Length=${transferPart.totalSize}`);
+            logger.info(`Create asset servlet upload process adding form elements file@Offset=${low}, chunk@Length=${length}, file@Length=${transferPart.totalSize}`);
             form.append(FILE_OFFSET, low);
             form.append(CHUNK_LENGTH, length);
             form.append(FILE_LENGTH, transferPart.totalSize);
diff --git a/lib/block/blockdownload.js b/lib/block/blockdownload.js
index 8675476d..74e17c44 100644
--- a/lib/block/blockdownload.js
+++ b/lib/block/blockdownload.js
@@ -31,6 +31,7 @@ const { RandomFileAccess } = require('../randomfileaccess');
 const { FilterFailedAssets } = require('../functions/filterfailedassets');
 const { GetAssetMetadata } = require('../functions/getassetmetadata');
 const { BlockRequestGenerator } = require('../asset/blockrequestgenerator');
+const logger = require("../logger");
 /**
  * Generate AEM download transfer assets
  *
@@ -40,7 +41,7 @@
  * @yields {TransferAsset} Transfer asset
  */
 async function* generateBlockDownloadTransfer(options) {
-    console.log(`Generating block download transfer parts`);
+    logger.info(`Generating block download transfer parts`);
 
     const expectedLength = options.downloadFiles && (options.downloadFiles.length || options.downloadFiles.size);
     let assetCounter = 0;
@@ -58,12 +59,12 @@
         });
 
         assetCounter++;
-        console.log(`Generated download transfer asset ${assetCounter} of ${expectedLength}`);
+        logger.info(`Generated download transfer asset ${assetCounter} of ${expectedLength}`);
 
         yield transferAsset;
     }
 
-    console.log(`Generated ${assetCounter} download transfer assets (files to download: ${expectedLength})`);
+    logger.info(`Generated ${assetCounter} download transfer assets (files to download: ${expectedLength})`);
 }
 
 /**
@@ -110,7 +111,7 @@ class BlockDownload extends EventEmitter {
         this.errorEvents = [];
 
         controller.on(TransferEvents.CREATE_TRANSFER_PARTS, transferEvent => {
-            console.log("Block download: block download controller starting file download");
+            logger.info("Block download: block download controller starting file download");
             this.emit("filestart", transferEvent.transferAsset.eventData);
         });
@@ -122,12 +123,12 @@
         });
 
         controller.on(TransferEvents.AFTER_JOIN_TRANSFER_PARTS, transferEvent => {
-            console.log("Block download: block download controller finishing file download");
+            logger.info("Block download: block download controller finishing file download");
             this.emit("fileend", transferEvent.transferAsset.eventData);
         });
 
         controller.on(TransferEvents.ERROR, transferEvent => {
-            console.log(`Error during block download: ${transferEvent.error}`);
+            logger.info(`Error during block download: ${transferEvent.error}`);
             this.errorEvents.push(transferEvent);
 
             if (transferEvent.props.firstError) {
@@ -162,7 +163,7 @@
             controller.removeAllListeners(TransferEvents.AFTER_JOIN_TRANSFER_PARTS);
             controller.removeAllListeners(TransferEvents.ERROR);
         } catch (err) {
-            console.log(`Failed to remove event listeners from block download controller: ${err}`);
+            logger.info(`Failed to remove event listeners from block download controller: ${err}`);
         }
     }
@@ -190,13 +191,13 @@
         );
         pipeline.setFilterFunction(new FilterFailedAssets());
 
-        console.log("Block download: executing block download pipeline");
+        logger.info("Block download: executing block download pipeline");
         await executePipeline(pipeline, generateBlockDownloadTransfer(options), controller);
-        console.log("Block download: finished executing block download pipeline");
+        logger.info("Block download: finished executing block download pipeline");
 
         if (this.errorEvents && this.errorEvents.length > 0) {
             // throw the first emitted error during transfer
-            console.log(`Errors encountered during block download (${this.errorEvents.length} total error(s))`);
+            logger.info(`Errors encountered during block download (${this.errorEvents.length} total error(s))`);
             throw this.errorEvents[0].error;
         }
     } finally {
@@ -206,11 +207,11 @@
         if (randomFileAccess) {
             await randomFileAccess.close();
-            console.log("Block download: closed random file accessor");
+            logger.info("Block download: closed random file accessor");
         }
 
         if (controller) {
             await controller.cleanupFailedTransfers();
-            console.log("Block download: cleaned up failed transfers");
+            logger.info("Block download: cleaned up failed transfers");
         }
     }
 }
diff --git a/lib/block/blockupload.js b/lib/block/blockupload.js
index 7018cbe6..36428184 100644
--- a/lib/block/blockupload.js
+++ b/lib/block/blockupload.js
@@ -34,6 +34,7 @@ const { IllegalArgumentError } = require("../error");
 const { getFileStats } = require('../util');
 const { AssetMultipart } = require("../asset/assetmultipart");
 const { BlockRequestGenerator } = require("../asset/blockrequestgenerator");
+const logger = require("../logger");
 const DEFAULT_MAX_CONCURRENCY = 8;
 // Default part size is 10mb
 const DEFAULT_PART_SIZE = 10 * 1024 * 1024;
@@ -49,7 +50,7 @@
  * @yields {TransferAsset} Transfer asset
 */
 async function* generateBlockUploadTransfer(options) {
-    console.log(`Generating block upload transfer parts`);
+    logger.info(`Generating block upload transfer parts`);
 
     const expectedLength = options.uploadFiles && (options.uploadFiles.length || options.uploadFiles.size);
     let assetCounter = 0;
@@ -67,10 +68,10 @@
         if (typeof uploadFile.fileUrl === "object"
             && Array.isArray(uploadFile.fileUrl)
             && uploadFile.fileUrl.length > 0) {
-            console.log("Multiple uploads to run");
+            logger.info("Multiple uploads to run");
             assetTarget = uploadFile.fileUrl[0];
         } else {
-            console.log("Single upload to run");
+            logger.info("Single upload to run");
             assetTarget = uploadFile.fileUrl;
         }
         const targetUrl = new URL(assetTarget);
@@ -79,13 +80,13 @@
         const target = new Asset(targetUrl, options.headers, uploadFile.multipartHeaders);
 
         if (!uploadFile.fileSize) {
-            console.log("Getting transfer asset size from file to upload");
+            logger.info("Getting transfer asset size from file to upload");
             const { size } = await getFileStats(uploadFile.filePath);
             uploadFile.fileSize = size;
         }
 
         const contentType = options.headers && options.headers['content-type'];
-        console.log(`Transfer asset to upload is of content type ${contentType} and size ${uploadFile.fileSize} bytes`);
+        logger.info(`Transfer asset to upload is of content type ${contentType} and size ${uploadFile.fileSize} bytes`);
         const transferAsset = new TransferAsset(source, target, {
             acceptRanges: true,
             metadata: new AssetMetadata(uploadFile.filePath, contentType, uploadFile.fileSize)
@@ -96,7 +97,7 @@
         const minPartSize = uploadFile.minPartSize || Math.min(10, maxPartSize); // maxPartSize must be defined
 
         if (typeof uploadURIs === "object" && Array.isArray(uploadURIs) && uploadURIs.length > 0) {
-            console.log(`Upload target is multipart ( ${uploadURIs.length} parts), min part size: ${minPartSize}, max part size: ${maxPartSize}`);
+            logger.info(`Upload target is multipart (${uploadURIs.length} parts), min part size: ${minPartSize}, max part size: ${maxPartSize}`);
             transferAsset.multipartTarget = new AssetMultipart(
                 uploadURIs,
                 minPartSize,
@@ -106,12 +107,12 @@
             );
         }
         assetCounter++;
-        console.log(`Generated upload transfer asset ${assetCounter} of ${expectedLength}`);
+        logger.info(`Generated upload transfer asset ${assetCounter} of ${expectedLength}`);
 
         yield transferAsset;
     }
 
-    console.log(`Generated ${assetCounter} upload transfer assets (files to upload: ${expectedLength})`);
+    logger.info(`Generated ${assetCounter} upload transfer assets (files to upload: ${expectedLength})`);
 }
 
 class BlockUpload extends EventEmitter {
@@ -155,7 +156,7 @@
         this.errorEvents = [];
 
         controller.on(TransferEvents.TRANSFER, transferEvent => {
-            console.log("Block upload: block upload controller starting part upload");
+            logger.info("Block upload: block upload controller starting part upload");
             this.emit("transferPart", transferEvent.transferAsset.eventData);
         });
@@ -167,12 +168,12 @@
         });
 
         controller.on(TransferEvents.AFTER_TRANSFER, transferEvent => {
-            console.log("Block upload: block upload controller finishing part upload");
+            logger.info("Block upload: block upload controller finishing part upload");
             this.emit("aftertransfer", transferEvent.transferAsset.eventData);
         });
 
         controller.on(TransferEvents.ERROR, transferEvent => {
-            console.log(`Error during block upload: ${transferEvent.error}`);
+            logger.info(`Error during block upload: ${transferEvent.error}`);
             if (transferEvent.props.firstError) {
                 this.errorEvents.push(transferEvent);
                 this.emit("fileerror", {
@@ -207,7 +208,7 @@
             controller.removeAllListeners(TransferEvents.AFTER_JOIN_TRANSFER_PARTS);
             controller.removeAllListeners(TransferEvents.ERROR);
         } catch (err) {
-            console.log(`Failed to remove event listeners from block upload controller: ${err}`);
+            logger.info(`Failed to remove event listeners from block upload controller: ${err}`);
         }
     }
@@ -234,14 +235,14 @@
         );
         pipeline.setFilterFunction(new FilterFailedAssets);
 
-        console.log("Block upload: executing block upload pipeline");
+        logger.info("Block upload: executing block upload pipeline");
         await executePipeline(pipeline, generateBlockUploadTransfer(options), controller);
-        console.log("Block upload: finished executing block upload pipeline");
+        logger.info("Block upload: finished executing block upload pipeline");
 
         if (this.errorEvents && this.errorEvents.length > 0) {
             // delete file (not needed - as AEM won't commit the blob in case of error)
             // throw the first emitted error
-            console.log(`Errors encountered during block upload (${this.errorEvents.length} total error(s))`);
+            logger.info(`Errors encountered during block upload (${this.errorEvents.length} total error(s))`);
             throw this.errorEvents[0].error;
         }
     } finally {
@@ -251,7 +252,7 @@
 
         if (randomFileAccess) {
             await randomFileAccess.close();
-            console.log("Block upload: closed random file accessor");
+            logger.info("Block upload: closed random file accessor");
         }
     }
 }
diff --git a/lib/functions/getassetmetadata.js b/lib/functions/getassetmetadata.js
index 2c8b90be..bf929847 100644
--- a/lib/functions/getassetmetadata.js
+++ b/lib/functions/getassetmetadata.js
@@ -79,9 +79,9 @@
             && transferAsset.metadata.filename
             && transferAsset.metadata.contentType
             && transferAsset.metadata.contentLength) {
-            console.log(`Transfer asset has all needed metadata to proceed (content-type: ${transferAsset.metadata.contentType}, content length: ${transferAsset.metadata.contentLength})`);
+            logger.info(`Transfer asset has all needed metadata to proceed (content-type: ${transferAsset.metadata.contentType}, content length: ${transferAsset.metadata.contentLength})`);
         } else {
-            console.log("Transfer asset needs to acquire additional metadata. Executing metadata request");
+            logger.info("Transfer asset needs to acquire additional metadata. Executing metadata request");
             await retry(async (options) => {
                 // S3 doesn't support HEAD requests against presigned URLs
                 // TODO: 0-byte file support for S3 which results in a 416 error
@@ -92,7 +92,7 @@
                     requestOptions: options && options.requestOptions
                 });
                 transferAsset.acceptRanges = headers.get("accept-ranges") === "bytes";
-                console.log(`Server accepts ranges: ${transferAsset.acceptRanges} (accept-ranges header set to bytes)`);
+                logger.info(`Server accepts ranges: ${transferAsset.acceptRanges} (accept-ranges header set to bytes)`);
                 transferAsset.metadata = new AssetMetadata(
                     getFilename(headers),
                     getContentType(headers),
diff --git a/lib/functions/transfer.js b/lib/functions/transfer.js
index 8ff8066a..8d21d4b5 100644
--- a/lib/functions/transfer.js
+++ b/lib/functions/transfer.js
@@ -86,23 +86,23 @@
                 throw Error(`'contentRange.length' too large, not supported yet: ${transferPart.source.url}`);
             }
 
-            console.log(`Transferred content range: ${contentRange}`);
+            logger.info(`Transferred content range: ${contentRange}`);
 
             if (isFileProtocol(transferPart.source.url) && targetUrl) {
-                console.log("Source has protocol 'file'");
+                logger.info("Source has protocol 'file'");
                 const buf = await this.randomFileAccess.read(transferPart.source.url, contentRange);
 
                 if (contentRange) {
                     if (buf && buf.length) {
-                        console.log(`Read file ${transferPart.source.url} for content range low ${contentRange.low}, to high ${contentRange.high}, read ${buf.length} bytes`);
+                        logger.info(`Read file ${transferPart.source.url} for content range low ${contentRange.low}, to high ${contentRange.high}, read ${buf.length} bytes`);
                     } else {
-                        console.log(`Nothing read from file for content range low ${contentRange.low}, to high ${contentRange.high}`);
+                        logger.info(`Nothing read from file for content range low ${contentRange.low}, to high ${contentRange.high}`);
                     }
                 } else {
                     if (buf && buf.length) {
-                        console.log(`Read file ${transferPart.source.url} (no content range), read ${buf.length} bytes`);
+                        logger.info(`Read file ${transferPart.source.url} (no content range), read ${buf.length} bytes`);
                     } else {
-                        console.log(`Nothing read from file (no content range)`);
+                        logger.info(`Nothing read from file (no content range)`);
                     }
                 }
 
@@ -132,7 +132,7 @@
                     await issuePut(targetUrl, requestOptions);
                 }, this.options);
             } else if (transferPart.source.blob && targetUrl) {
-                console.log(`Source is blob, transferring ranges low ${contentRange.low}, to high ${contentRange.high}`);
+                logger.info(`Source is blob, transferring ranges low ${contentRange.low}, to high ${contentRange.high}`);
                 const blob = transferPart.source.blob.slice(contentRange.low, contentRange.high + 1);
                 await retry(async () => {
                     const body = this.requestGenerator.createPartHttpBody({
@@ -160,7 +160,7 @@
                     await issuePut(targetUrl, requestOptions);
                 }, this.options);
             } else if (targetUrl && isFileProtocol(targetUrl) && transferPart.source.url) {
-                console.log("Target has protocol 'file'");
+                logger.info("Target has protocol 'file'");
                 await retry(async () => {
                     const totalSize = transferPart.metadata.contentLength;
 
@@ -185,7 +185,7 @@
                         throw Error(`Server does not seem to have respected Range header. Expected range ${contentRange.low}-${contentRange.high}, content length is ${contentLength}B`);
                     }
 
-                    console.log("Converting stream to buffer to get response data");
+                    logger.info("Converting stream to buffer to get response data");
                     const buffer = await streamToBuffer(HTTP.METHOD.GET, transferPart.source.url, response.status, response.body, contentLength);
                     await this.randomFileAccess.write(targetUrl, contentRange, buffer, totalSize);
                 }, this.options);
diff --git a/lib/functions/transferpartscreate.js b/lib/functions/transferpartscreate.js
index db577899..c8216950 100644
--- a/lib/functions/transferpartscreate.js
+++ b/lib/functions/transferpartscreate.js
@@ -64,7 +64,7 @@
         for await (const transferAsset of transferAssets) {
             try {
                 controller.notify(TransferEvents.CREATE_TRANSFER_PARTS, this.name, transferAsset);
-                console.log(`Server accepts ranges for this transfer asset: ${transferAsset.acceptRanges}`);
+                logger.info(`Server accepts ranges for this transfer asset: ${transferAsset.acceptRanges}`);
                 const contentLength = transferAsset.metadata.contentLength;
 
                 if (transferAsset.acceptRanges && transferAsset.multipartTarget) {
@@ -77,7 +77,7 @@
                         yield transferPart;
                         ++idx;
                     }
-                    console.log(`${idx} transfer parts created (multipart target)`);
+                    logger.info(`${idx} transfer parts created (multipart target)`);
                 } else if (transferAsset.acceptRanges && isFileProtocol(transferAsset.target.url)) {
                     const targetUrls = [transferAsset.target.url];
                     const partSize = this.preferredPartSize || DEFAULT_FILE_TARGET_PART_SIZE;
@@ -88,7 +88,7 @@
                         yield transferPart;
                         ++idx;
                     }
-                    console.log(`${idx} transfer parts created (target uses file protocol)`);
+                    logger.info(`${idx} transfer parts created (target uses file protocol)`);
                 } else {
                     const targetUrls = [transferAsset.target.url];
                     const contentRange = new DRange(0, contentLength - 1);
diff --git a/lib/randomfileaccess.js b/lib/randomfileaccess.js
index 8a18d81f..dc7cc013 100644
--- a/lib/randomfileaccess.js
+++ b/lib/randomfileaccess.js
@@ -14,6 +14,7 @@
 const PRIVATE = Symbol("PRIVATE");
 
 const { FileHandle, FileFlags } = require("./filehandle");
+const logger = require("./logger");
 
 /**
  * @typedef {Object} WriteResponse
@@ -42,7 +43,7 @@
  * @returns {FileHandle} File handle
 */
 async function openExtendFile(path, flags, size = 0) {
-    console.log(`Opening file ${path}, extend size is ${size} (with flags ${flags})`);
+    logger.info(`Opening file ${path}, extend size is ${size} (with flags ${flags})`);
     const handle = await FileHandle.open(path, flags);
 
     try {
@@ -71,11 +72,11 @@
     // will end up with the same fileHandlePromise
     let fileHandlePromise = fileHandles.get(path);
     if (!fileHandlePromise) {
-        console.log(`Opening file ${path} with flags ${flags} and total write-only length ${totalLength}`);
+        logger.info(`Opening file ${path} with flags ${flags} and total write-only length ${totalLength}`);
         fileHandlePromise = openExtendFile(path, flags, totalLength);
         fileHandles.set(path, fileHandlePromise);
     } else {
-        console.log(`Reusing existing filehandle promise for file ${path} (requested flags ${flags} and total write-only length ${totalLength})`);
+        logger.info(`Reusing existing filehandle promise for file ${path} (requested flags ${flags} and total write-only length ${totalLength})`);
     }
 
     // acquire the file handle
@@ -115,13 +116,13 @@
 
             const fileHandlePromise = fileHandles.get(path);
             if (fileHandlePromise) {
-                console.log(`Closing file handle for ${path}`);
+                logger.info(`Closing file handle for ${path}`);
                 fileHandles.delete(path);
                 const handle = await fileHandlePromise;
                 await handle.close({ silent: true });
             }
         } else {
-            console.log("Closing all file handles");
+            logger.info("Closing all file handles");
             const closingFileHandles = this[PRIVATE].fileHandles;
             this[PRIVATE].fileHandles = new Map();
             for (const fileHandlePromise of closingFileHandles.values()) {
@@ -143,7 +144,7 @@
     async write(path, range, buffer, totalSize) {
         const { fileHandles } = this[PRIVATE];
         const handle = await getOrOpen(fileHandles, path, FileFlags.WRITEONLY, totalSize);
-        console.log(`Writing buffer of size ${buffer.length} to file`);
+        logger.info(`Writing buffer of size ${buffer.length} to file`);
         return handle.write(buffer, 0, buffer.length, range.low);
     }
 
@@ -157,18 +158,18 @@
     async read(path, range) {
         const { fileHandles } = this[PRIVATE];
 
-        console.log(`Getting file handle for reading file ${path}`);
+        logger.info(`Getting file handle for reading file ${path}`);
         const handle = await getOrOpen(fileHandles, path, "r");
 
         const buffer = Buffer.allocUnsafe(range.length);
-        console.log(`Allocating ${range.length} for reading from file ${path}`);
+        logger.info(`Allocating ${range.length} for reading from file ${path}`);
         const { bytesRead } = await handle.read(buffer, 0, range.length, range.low);
-        console.log(`Read ${bytesRead} from file ${path}`);
+        logger.info(`Read ${bytesRead} from file ${path}`);
         return buffer.slice(0, bytesRead);
     }
 }
 
 module.exports = {
     RandomFileAccess
-};
\ No newline at end of file
+};
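
The `./lib/logger.js` module that this patch routes all former `console.log` calls through is not included in the diff. Given the `DEBUG=httptransfer:*` and `DEBUG=httptransfer:warn` conventions documented in the README hunk above, it presumably wraps the `debug` module with one namespace per log level. A minimal sketch of that shape, purely as an illustration (the real `lib/logger.js` may differ):

```js
// Hypothetical sketch of lib/logger.js: NOT part of this patch.
// Assumes the logger wraps the "debug" module with one namespace per level,
// so DEBUG='httptransfer:*' enables all output and DEBUG='httptransfer:warn'
// enables warnings only, matching the README section added above.
const debug = require("debug");

module.exports = {
    debug: debug("httptransfer:debug"),
    info: debug("httptransfer:info"),
    warn: debug("httptransfer:warn"),
    error: debug("httptransfer:error")
};
```

Under that assumption, the `logger.info(...)` calls introduced here are silent unless a matching `DEBUG` namespace is enabled, so replacing unconditional `console.log` output does not change the library's default (quiet) behavior.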