Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 8 additions & 7 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@
"@aws-sdk/credential-providers": "^3.1038.0",
"@smithy/shared-ini-file-loader": "^4.4.9",
"@tigrisdata/iam": "^2.1.1",
"@tigrisdata/storage": "^3.4.0",
"@tigrisdata/storage": "^3.5.1",
"commander": "^14.0.3",
"enquirer": "^2.4.1",
"jose": "^6.2.3",
Expand Down
62 changes: 13 additions & 49 deletions src/lib/cp.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { getStorageConfig } from '@auth/provider.js';
import { get, head, list, put } from '@tigrisdata/storage';
import { copy, get, head, list, put } from '@tigrisdata/storage';
import { executeWithConcurrency } from '@utils/concurrency.js';
import { exitWithError } from '@utils/exit.js';
import { formatSize } from '@utils/format.js';
Expand Down Expand Up @@ -210,9 +210,11 @@ async function copyObject(
srcBucket: string,
srcKey: string,
destBucket: string,
destKey: string,
showProgress = false
destKey: string
): Promise<{ error?: string }> {
// Folder markers (zero-byte objects ending in `/`) are still
// created via put('') — CopyObject on a literal folder marker is
// ambiguous, and the marker has no payload to preserve.
if (srcKey.endsWith('/')) {
const { error: putError } = await put(destKey, '', {
config: {
Expand All @@ -228,56 +230,19 @@ async function copyObject(
return {};
}

// head() is unconditional now: we need the source's Content-Type
// to propagate it to the destination so a remote→remote copy
// doesn't strip the header.
const { data: headData } = await head(srcKey, {
config: {
...config,
bucket: srcBucket,
},
});
const fileSize = headData?.size;
const sourceContentType = headData?.contentType;

const { data, error: getError } = await get(srcKey, 'stream', {
config: {
...config,
bucket: srcBucket,
},
});

if (getError) {
return { error: getError.message };
}

const { error: putError } = await put(destKey, data, {
...calculateUploadParams(fileSize),
...(sourceContentType ? { contentType: sourceContentType } : {}),
onUploadProgress: showProgress
? ({ loaded }) => {
if (fileSize !== undefined && fileSize > 0) {
const pct = Math.round((loaded / fileSize) * 100);
process.stdout.write(
`\rCopying: ${formatSize(loaded)} / ${formatSize(fileSize)} (${pct}%)`
);
} else {
process.stdout.write(`\rCopying: ${formatSize(loaded)}`);
}
}
: undefined,
// Server-side CopyObject. No bytes flow through the client and the
// source's Content-Type / metadata are preserved automatically.
const { error: copyError } = await copy(srcKey, destKey, {
srcBucket,
destBucket,
config: {
...config,
bucket: destBucket,
},
});

if (showProgress) {
process.stdout.write('\r' + ' '.repeat(60) + '\r');
}

if (putError) {
return { error: putError.message };
if (copyError) {
return { error: copyError.message };
}

return {};
Expand Down Expand Up @@ -825,8 +790,7 @@ async function copyRemoteToRemote(
srcParsed.bucket,
srcParsed.path,
destParsed.bucket,
destKey,
!_jsonMode
destKey
);

if (result.error) {
Expand Down
81 changes: 28 additions & 53 deletions src/lib/mv.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import { getStorageConfig } from '@auth/provider.js';
import { get, head, list, put, remove } from '@tigrisdata/storage';
import { copy, list, move, put, remove } from '@tigrisdata/storage';
import { exitWithError } from '@utils/exit.js';
import { formatSize } from '@utils/format.js';
import { confirm, requireInteractive } from '@utils/interactive.js';
import { getFormat, getOption } from '@utils/options.js';
import {
Expand All @@ -12,7 +11,6 @@ import {
parseRemotePath,
wildcardPrefix,
} from '@utils/path.js';
import { calculateUploadParams } from '@utils/upload.js';

let _jsonMode = false;

Expand Down Expand Up @@ -277,8 +275,7 @@ export default async function mv(options: Record<string, unknown>) {
srcPath.bucket,
srcPath.path,
destPath.bucket,
destKey,
!_jsonMode // show progress for single file (not in JSON mode)
destKey
);

if (result.error) {
Expand Down Expand Up @@ -308,12 +305,13 @@ async function moveObject(
srcBucket: string,
srcKey: string,
destBucket: string,
destKey: string,
showProgress = false
destKey: string
): Promise<{ error?: string }> {
// Handle folder markers specially (empty objects ending with /)
// Folder markers (zero-byte objects ending in `/`) are still
// recreated via put('') + remove(). The server's rename header is
// not meaningful for the marker itself and we want to preserve the
// existing semantics here.
if (srcKey.endsWith('/')) {
// Put empty string to destination (creates folder marker)
const { error: putError } = await put(destKey, '', {
config: {
...config,
Expand All @@ -325,7 +323,6 @@ async function moveObject(
return { error: putError.message };
}

// Delete source folder marker
const { error: removeError } = await remove(srcKey, {
config: {
...config,
Expand All @@ -342,61 +339,39 @@ async function moveObject(
return {};
}

// Get source object size and content-type for upload params and
// header propagation. Without this, a remote→remote move would
// strip the source's Content-Type.
const { data: headData } = await head(srcKey, {
config: {
...config,
bucket: srcBucket,
},
});
const fileSize = headData?.size;
const sourceContentType = headData?.contentType;
// Same-bucket: metadata-only rename via `X-Tigris-Rename: true`.
// One round-trip, no bytes through the client.
if (srcBucket === destBucket) {
const { error: moveError } = await move(srcKey, destKey, {
config: {
...config,
bucket: srcBucket,
},
});

// Get source object
const { data, error: getError } = await get(srcKey, 'stream', {
config: {
...config,
bucket: srcBucket,
},
});
if (moveError) {
return { error: moveError.message };
}

if (getError) {
return { error: getError.message };
return {};
}

// Put to destination
const { error: putError } = await put(destKey, data, {
...calculateUploadParams(fileSize),
...(sourceContentType ? { contentType: sourceContentType } : {}),
onUploadProgress: showProgress
? ({ loaded }) => {
if (fileSize !== undefined && fileSize > 0) {
const pct = Math.round((loaded / fileSize) * 100);
process.stdout.write(
`\rMoving: ${formatSize(loaded)} / ${formatSize(fileSize)} (${pct}%)`
);
} else {
process.stdout.write(`\rMoving: ${formatSize(loaded)}`);
}
}
: undefined,
// Cross-bucket: the server doesn't support move across buckets, so
// fall back to server-side CopyObject + DELETE. Still no bytes
// through the client.
const { error: copyError } = await copy(srcKey, destKey, {
srcBucket,
destBucket,
config: {
...config,
bucket: destBucket,
},
});

if (showProgress) {
process.stdout.write('\r' + ' '.repeat(60) + '\r');
}

if (putError) {
return { error: putError.message };
if (copyError) {
return { error: copyError.message };
}

// Delete source
const { error: removeError } = await remove(srcKey, {
config: {
...config,
Expand Down
52 changes: 52 additions & 0 deletions src/lib/objects/set-access.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
import { getStorageConfig } from '@auth/provider.js';
import { setObjectAccess } from '@tigrisdata/storage';
import { failWithError } from '@utils/exit.js';
import { msg, printStart, printSuccess } from '@utils/messages.js';
import { getFormat, getOption } from '@utils/options.js';
import { resolveObjectArgs } from '@utils/path.js';

const context = msg('objects', 'set-access');

/**
 * `objects set-access` command handler: flips a single object's ACL
 * between "public" and "private".
 *
 * @param options Raw CLI option bag; reads `bucket` (name or
 *   bucket/key path), `key`, and `access` (`-a`/`-A` aliases).
 *
 * Validation or API failures never return — `failWithError` reports
 * and exits. On success, emits a JSON line when `--format json` is
 * active, then the standard success message.
 */
export default async function setAccess(options: Record<string, unknown>) {
  printStart(context);

  const format = getFormat(options);
  const bucketArg = getOption<string>(options, ['bucket']);
  const keyArg = getOption<string>(options, ['key']);
  const access = getOption<string>(options, ['access', 'a', 'A']);

  if (!bucketArg) {
    failWithError(context, 'Bucket name or path is required');
  }

  // Accept either a separate key or a combined bucket/key path.
  const { bucket, key } = resolveObjectArgs(bucketArg, keyArg);

  if (!key) {
    failWithError(context, 'Object key is required');
  }

  // Only the two literal ACL values are accepted; this guard also
  // narrows `access` to 'public' | 'private' for the API call below.
  if (!(access === 'public' || access === 'private')) {
    failWithError(context, '--access must be either "public" or "private"');
  }

  const baseConfig = await getStorageConfig();

  const { error } = await setObjectAccess(key, {
    access,
    config: { ...baseConfig, bucket },
  });

  if (error) {
    failWithError(context, error);
  }

  if (format === 'json') {
    console.log(JSON.stringify({ action: 'updated', bucket, key, access }));
  }

  printSuccess(context, { key, bucket, access });
}
27 changes: 18 additions & 9 deletions src/lib/objects/set.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { getStorageConfig } from '@auth/provider.js';
import { updateObject } from '@tigrisdata/storage';
import { move, setObjectAccess } from '@tigrisdata/storage';
import { failWithError } from '@utils/exit.js';
import { msg, printStart, printSuccess } from '@utils/messages.js';
import { getFormat, getOption } from '@utils/options.js';
Expand Down Expand Up @@ -35,14 +35,23 @@ export default async function setObject(options: Record<string, unknown>) {
}

const config = await getStorageConfig();
const finalConfig = { ...config, bucket };

const { error } = await updateObject(key, {
// Rename first so the access update targets the renamed object.
let currentKey = key;
if (newKey) {
const { error: moveError } = await move(key, newKey, {
config: finalConfig,
});
if (moveError) {
failWithError(context, moveError);
}
currentKey = newKey;
}

const { error } = await setObjectAccess(currentKey, {
access: access === 'public' ? 'public' : 'private',
...(newKey && { key: newKey }),
config: {
...config,
bucket,
},
config: finalConfig,
});

if (error) {
Expand All @@ -54,12 +63,12 @@ export default async function setObject(options: Record<string, unknown>) {
JSON.stringify({
action: 'updated',
bucket,
key,
key: currentKey,
access,
...(newKey ? { newKey } : {}),
})
);
}

printSuccess(context, { key, bucket });
printSuccess(context, { key: currentKey, bucket });
}
Loading