"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Upload = void 0;
const abort_controller_1 = require("@aws-sdk/abort-controller");
const client_s3_1 = require("@aws-sdk/client-s3");
const smithy_client_1 = require("@aws-sdk/smithy-client");
const events_1 = require("events");
const bytelength_1 = require("./bytelength");
const chunker_1 = require("./chunker");
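// S3 rejects multipart parts smaller than 5 MiB (except the final part), so this is both the default and the minimum allowed partSize.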
const MIN_PART_SIZE = 1024 * 1024 * 5;
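// Upload drives a single S3 upload: bodies that fit in one part go through a plain PutObject, larger bodies are chunked and uploaded as concurrent multipart parts, with progress events along the way.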
class Upload extends events_1.EventEmitter {
constructor(options) {
super();
this.MAX_PARTS = 10000;
this.queueSize = 4;
this.partSize = MIN_PART_SIZE;
this.leavePartsOnError = false;
this.tags = [];
this.concurrentUploaders = [];
this.uploadedParts = [];
this.isMultiPart = true;
this.queueSize = options.queueSize || this.queueSize;
this.partSize = options.partSize || this.partSize;
this.leavePartsOnError = options.leavePartsOnError || this.leavePartsOnError;
this.tags = options.tags || this.tags;
this.client = options.client;
this.params = options.params;
this.__validateInput();
this.totalBytes = (0, bytelength_1.byteLength)(this.params.Body);
this.bytesUploadedSoFar = 0;
this.abortController = new abort_controller_1.AbortController();
}
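// Signals every in-flight worker to stop; done() then rejects with an AbortError.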
async abort() {
this.abortController.abort();
}
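// Starts the upload and resolves with the S3 response, or rejects as soon as abort() fires.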
async done() {
return await Promise.race([this.__doMultipartUpload(), this.__abortTimeout(this.abortController.signal)]);
}
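// Remembers the event name so __notifyProgress can re-emit progress on it; only the most recently registered event name receives progress updates.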
on(event, listener) {
this.uploadEvent = event;
return super.on(event, listener);
}
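// Body fits in a single part: use one PutObject call and synthesize the object's Location URL from the resolved endpoint (path-style or virtual-hosted-style).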
async __uploadUsingPut(dataPart) {
this.isMultiPart = false;
const params = { ...this.params, Body: dataPart.data };
const [putResult, endpoint] = await Promise.all([
this.client.send(new client_s3_1.PutObjectCommand(params)),
this.client.config.endpoint(),
]);
const locationKey = this.params
.Key.split("/")
.map((segment) => (0, smithy_client_1.extendedEncodeURIComponent)(segment))
.join("/");
const locationBucket = (0, smithy_client_1.extendedEncodeURIComponent)(this.params.Bucket);
const Location = this.client.config.forcePathStyle
? `${endpoint.protocol}//${endpoint.hostname}/${locationBucket}/${locationKey}`
: `${endpoint.protocol}//${locationBucket}.${endpoint.hostname}/${locationKey}`;
this.singleUploadResult = {
...putResult,
Bucket: this.params.Bucket,
Key: this.params.Key,
Location,
};
const totalSize = (0, bytelength_1.byteLength)(dataPart.data);
this.__notifyProgress({
loaded: totalSize,
total: totalSize,
part: 1,
Key: this.params.Key,
Bucket: this.params.Bucket,
});
}
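// Creates the multipart upload lazily and memoizes the promise so concurrent workers issue the CreateMultipartUpload call only once.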
async __createMultipartUpload() {
if (!this.createMultiPartPromise) {
const createCommandParams = { ...this.params, Body: undefined };
this.createMultiPartPromise = this.client.send(new client_s3_1.CreateMultipartUploadCommand(createCommandParams));
}
const createMultipartUploadResult = await this.createMultiPartPromise;
this.uploadId = createMultipartUploadResult.UploadId;
}
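// One worker: pulls chunks from the shared feeder, uploads each as a part, records ETags and checksums, and reports progress; returns quietly once the upload has been aborted.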
async __doConcurrentUpload(dataFeeder) {
for await (const dataPart of dataFeeder) {
if (this.uploadedParts.length > this.MAX_PARTS) {
throw new Error(`Exceeded ${this.MAX_PARTS} parts while uploading ${this.params.Key} to ${this.params.Bucket}.`);
}
try {
if (this.abortController.signal.aborted) {
return;
}
if (dataPart.partNumber === 1 && dataPart.lastPart) {
return await this.__uploadUsingPut(dataPart);
}
if (!this.uploadId) {
await this.__createMultipartUpload();
if (this.abortController.signal.aborted) {
return;
}
}
const partResult = await this.client.send(new client_s3_1.UploadPartCommand({
...this.params,
UploadId: this.uploadId,
Body: dataPart.data,
PartNumber: dataPart.partNumber,
}));
if (this.abortController.signal.aborted) {
return;
}
this.uploadedParts.push({
PartNumber: dataPart.partNumber,
ETag: partResult.ETag,
...(partResult.ChecksumCRC32 && { ChecksumCRC32: partResult.ChecksumCRC32 }),
...(partResult.ChecksumCRC32C && { ChecksumCRC32C: partResult.ChecksumCRC32C }),
...(partResult.ChecksumSHA1 && { ChecksumSHA1: partResult.ChecksumSHA1 }),
...(partResult.ChecksumSHA256 && { ChecksumSHA256: partResult.ChecksumSHA256 }),
});
this.bytesUploadedSoFar += (0, bytelength_1.byteLength)(dataPart.data);
this.__notifyProgress({
loaded: this.bytesUploadedSoFar,
total: this.totalBytes,
part: dataPart.partNumber,
Key: this.params.Key,
Bucket: this.params.Bucket,
});
}
catch (e) {
if (!this.uploadId) {
throw e;
}
if (this.leavePartsOnError) {
throw e;
}
}
}
}
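// Fans out queueSize workers over one shared chunk iterator, then either completes the multipart upload or returns the single PutObject result, applying object tags last.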
async __doMultipartUpload() {
const dataFeeder = (0, chunker_1.getChunk)(this.params.Body, this.partSize);
for (let index = 0; index < this.queueSize; index++) {
const currentUpload = this.__doConcurrentUpload(dataFeeder);
this.concurrentUploaders.push(currentUpload);
}
await Promise.all(this.concurrentUploaders);
if (this.abortController.signal.aborted) {
throw Object.assign(new Error("Upload aborted."), { name: "AbortError" });
}
let result;
if (this.isMultiPart) {
this.uploadedParts.sort((a, b) => a.PartNumber - b.PartNumber);
const uploadCompleteParams = {
...this.params,
Body: undefined,
UploadId: this.uploadId,
MultipartUpload: {
Parts: this.uploadedParts,
},
};
result = await this.client.send(new client_s3_1.CompleteMultipartUploadCommand(uploadCompleteParams));
}
else {
result = this.singleUploadResult;
}
if (this.tags.length) {
await this.client.send(new client_s3_1.PutObjectTaggingCommand({
...this.params,
Tagging: {
TagSet: this.tags,
},
}));
}
return result;
}
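// Emits progress on whichever event name the caller subscribed with via on().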
__notifyProgress(progress) {
if (this.uploadEvent) {
this.emit(this.uploadEvent, progress);
}
}
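// A promise that never resolves and rejects with an AbortError when the signal fires, so the Promise.race in done() settles on abort.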
async __abortTimeout(abortSignal) {
return new Promise((resolve, reject) => {
abortSignal.onabort = () => {
const abortError = new Error("Upload aborted.");
abortError.name = "AbortError";
reject(abortError);
};
});
}
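// Sanity-checks constructor options before any network traffic.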
__validateInput() {
if (!this.params) {
throw new Error(`InputError: Upload requires params to be passed to upload.`);
}
if (!this.client) {
throw new Error(`InputError: Upload requires an AWS client to do uploads with.`);
}
if (this.partSize < MIN_PART_SIZE) {
throw new Error(`EntityTooSmall: Your proposed upload part size [${this.partSize}] is smaller than the minimum allowed size [${MIN_PART_SIZE}] (5 MiB).`);
}
if (this.queueSize < 1) {
throw new Error(`Queue size: Must have at least one uploading queue.`);
}
}
}
exports.Upload = Upload;
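// Illustrative usage sketch (commented out; not part of this module). It shows how the
// exported Upload class is typically driven from application code. The bucket, key,
// region, and file path below are placeholder assumptions, not values from this file.
//
//   const { S3Client } = require("@aws-sdk/client-s3");
//   const { Upload } = require("@aws-sdk/lib-storage");
//   const { createReadStream } = require("fs");
//
//   const upload = new Upload({
//       client: new S3Client({ region: "us-east-1" }),
//       params: { Bucket: "example-bucket", Key: "backups/big.bin", Body: createReadStream("./big.bin") },
//       queueSize: 4,                  // concurrent part uploads
//       partSize: 5 * 1024 * 1024,     // 5 MiB, the minimum S3 accepts per part
//       leavePartsOnError: false,      // set true to rethrow part errors instead of ignoring them
//   });
//   upload.on("httpUploadProgress", (progress) => console.log(progress));
//   await upload.done();               // inside an async function; call upload.abort() to cancel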