Commit 5126f942 authored by lijingang's avatar lijingang

Local development files before merging with remote

parents
node_modules/
*.log
.DS_Store
coverage/
dist/
.env
npm-debug.log*
yarn-debug.log*
yarn-error.log*
*.tgz
\ No newline at end of file
# @youdatasum/client-s3
S3 operations client for internal use.
## Installation
### As a global CLI tool
```bash
npm install -g @youdatasum/client-s3
```
Or via GitLab npm registry (internal). Alternatively, install directly from GitLab repository:
```bash
npm install -g git+ssh://git@git.youdatasum.com/ljg/client-s3.git
```
### As a library in your project
```bash
npm install @youdatasum/client-s3
```
Or via Git:
```bash
npm install git+ssh://git@git.youdatasum.com/ljg/client-s3.git
```
## Usage
### CLI
After global installation, you can use `ss3ops` command:
```bash
# Download a file
ss3ops download <bucket> <key> <destination>
# Upload a file
ss3ops upload <bucket> <key> <source>
# Upload a folder
ss3ops upload-folder <bucket> <prefix> <folder>
# Download a folder
ss3ops download-folder <bucket> <prefix> <destination>
# List objects in a bucket
ss3ops list <bucket> [prefix]
```
### Node.js library
```javascript
const { s3, downloadFileFromS3, uploadFileToS3, uploadFolderToS3, downloadS3Folder } = require('@youdatasum/client-s3');
// Use the pre-configured S3 client (connected to internal RustFS endpoint)
await s3.send(...);
// Download a file
await downloadFileFromS3('my-bucket', 'path/to/file.txt', './local.txt');
// Upload a file
const fs = require('fs');
const body = fs.readFileSync('./local.txt');
await uploadFileToS3('my-bucket', 'path/to/uploaded.txt', body);
// Upload a folder
await uploadFolderToS3('./local-folder', 'my-bucket', 'remote-prefix/');
// Download a folder
await downloadS3Folder('my-bucket', 'remote-prefix/', './downloads/');
```
## Configuration
The S3 client is pre-configured to connect to an internal RustFS endpoint. Credentials and endpoint are hardcoded in the library. If you need to customize, you can modify the `lib/s3-client.js` file.
## Development
1. Clone the repository.
2. Run `npm install`.
3. Make changes.
4. Test with `node bin/ss3ops.js`.
## Publishing
This package is published to the internal GitLab npm registry. Update version in `package.json` and run:
```bash
npm publish
```
Ensure you have proper permissions and `publishConfig` points to the correct registry.
## License
MIT
\ No newline at end of file
#!/usr/bin/env node
const { s3, downloadFileFromS3, uploadFileToS3, uploadFolderToS3, downloadS3Folder } = require('../lib/index.js');
const fs = require('fs');
const path = require('path');

// Everything after `node ss3ops.js` — args[0] is the subcommand name.
const args = process.argv.slice(2);
const command = args[0];

// No subcommand given: print the full usage text to stderr and exit non-zero.
if (!command) {
  const usageLines = [
    'Usage: ss3ops <command> [options]',
    'Commands:',
    ' download <bucket> <key> <destination> Download a file from S3',
    ' upload <bucket> <key> <source> Upload a file to S3',
    ' upload-folder <bucket> <prefix> <folder> Upload a folder to S3',
    ' download-folder <bucket> <prefix> <destination> Download a folder from S3',
    ' list <bucket> [prefix] List objects in bucket',
  ];
  for (const line of usageLines) {
    console.error(line);
  }
  process.exit(1);
}
/**
 * CLI dispatcher: validates per-command arguments, invokes the matching
 * library helper, and reports progress. Exits with status 1 on any error.
 */
async function main() {
  try {
    switch (command) {
      case 'download': {
        if (args.length < 4) {
          console.error('Error: missing arguments');
          console.error('Usage: ss3ops download <bucket> <key> <destination>');
          process.exit(1);
        }
        const [srcBucket, objectKey, destPath] = args.slice(1);
        await downloadFileFromS3(srcBucket, objectKey, destPath);
        console.log(`Downloaded ${objectKey} to ${destPath}`);
        break;
      }
      case 'upload': {
        if (args.length < 4) {
          console.error('Error: missing arguments');
          console.error('Usage: ss3ops upload <bucket> <key> <source>');
          process.exit(1);
        }
        const [targetBucket, targetKey, sourcePath] = args.slice(1);
        const fileBody = fs.readFileSync(sourcePath);
        await uploadFileToS3(targetBucket, targetKey, fileBody);
        console.log(`Uploaded ${sourcePath} to ${targetBucket}/${targetKey}`);
        break;
      }
      case 'upload-folder': {
        if (args.length < 4) {
          console.error('Error: missing arguments');
          console.error('Usage: ss3ops upload-folder <bucket> <prefix> <folder>');
          process.exit(1);
        }
        const [destBucket, keyPrefix, localFolder] = args.slice(1);
        await uploadFolderToS3(localFolder, destBucket, keyPrefix);
        console.log(`Uploaded folder ${localFolder} to ${destBucket}/${keyPrefix}`);
        break;
      }
      case 'download-folder': {
        if (args.length < 4) {
          console.error('Error: missing arguments');
          console.error('Usage: ss3ops download-folder <bucket> <prefix> <destination>');
          process.exit(1);
        }
        const [remoteBucket, remotePrefix, localDest] = args.slice(1);
        await downloadS3Folder(remoteBucket, remotePrefix, localDest);
        console.log(`Downloaded folder ${remotePrefix} to ${localDest}`);
        break;
      }
      case 'list': {
        if (args.length < 2) {
          console.error('Error: missing bucket');
          console.error('Usage: ss3ops list <bucket> [prefix]');
          process.exit(1);
        }
        // The list command talks to the SDK directly via the shared client.
        const { ListObjectsV2Command } = require('@aws-sdk/client-s3');
        const listing = await s3.send(
          new ListObjectsV2Command({ Bucket: args[1], Prefix: args[2] || '' })
        );
        for (const obj of listing.Contents || []) {
          console.log(`${obj.Key} (${obj.Size} bytes)`);
        }
        break;
      }
      default:
        console.error(`Unknown command: ${command}`);
        process.exit(1);
    }
  } catch (error) {
    console.error('Error:', error.message);
    process.exit(1);
  }
}
main();
\ No newline at end of file
/**
 * Package entry point.
 *
 * Re-exports everything from ./s3-client.js: the pre-configured `s3`
 * client plus `downloadFileFromS3`, `uploadFileToS3`, `uploadFolderToS3`
 * and `downloadS3Folder`.
 *
 * Note: `module.exports = s3Client` makes the export the SAME object as
 * the s3-client module, so the previous per-property copies
 * (`module.exports.s3 = s3Client.s3`, ...) were self-assignment no-ops
 * and have been removed.
 */
module.exports = require('./s3-client.js');
\ No newline at end of file
// SECURITY NOTE(review): access credentials were previously committed in a
// plaintext comment here as well as inline below. Rotate these keys and move
// them fully into environment variables / a secrets manager; the inline
// fallbacks below are kept only for backward compatibility.
//
// npm install @aws-sdk/client-s3 @aws-sdk/s3-request-presigner

// Third-party modules are loaded lazily inside the try block below so that
// this module can still be required (with `s3 === null`) when they are missing.
let S3Client, NodeHttpHandler;
const { writeFile } = require("fs/promises");
const fs = require("fs");
const path = require("path");
const { pipeline } = require("stream/promises"); // Node.js 15+; used for streaming transfers

// Pre-configured S3 client, or null when initialization fails.
let s3;
try {
  ({ S3Client } = require("@aws-sdk/client-s3"));
  ({ NodeHttpHandler } = require("@smithy/node-http-handler"));
  s3 = new S3Client({
    // Internal RustFS endpoint; override via S3_ENDPOINT.
    endpoint: process.env.S3_ENDPOINT || "http://127.0.0.1:9000",
    region: process.env.S3_REGION || "us-east-1", // Any value is accepted
    credentials: {
      // Prefer environment variables; hardcoded values remain as a
      // backward-compatible fallback (see SECURITY NOTE above).
      accessKeyId: process.env.S3_ACCESS_KEY_ID || "ERfTPtr9H5VdyasN0hCn",
      secretAccessKey:
        process.env.S3_SECRET_ACCESS_KEY ||
        "lOA7cJSzTY2nEgVeCR5WQhIf1tN9HvDjGmrpoP0w",
    },
    forcePathStyle: true, // Must be enabled for RustFS compatibility
    requestHandler: new NodeHttpHandler({
      connectionTimeout: 3000,
      socketTimeout: 5000,
    }),
  });
} catch (error) {
  // Missing/broken SDK dependency: degrade to a null client so that requiring
  // this module never throws; callers see an explicit error on first use.
  console.error("Failed to initialize S3 client:", error.message);
  s3 = null;
}
/**
 * Downloads a single object from S3 and writes it to the local file system.
 *
 * The response body is streamed straight to disk (instead of being buffered
 * fully in memory as before), so arbitrarily large objects download safely.
 *
 * @param {string} bucket - The name of the S3 bucket.
 * @param {string} key - The key of the object to download.
 * @param {string} destination - The local file path to save the downloaded file.
 * @throws {Error} If the S3 client failed to initialize or the request fails.
 */
const downloadFileFromS3 = async (bucket, key, destination) => {
  // Guard: the module-level init may have set s3 to null when the SDK is
  // unavailable; fail with a clear message instead of a TypeError.
  if (!s3) {
    throw new Error("S3 client is not initialized");
  }
  const { GetObjectCommand } = require("@aws-sdk/client-s3");
  const response = await s3.send(
    new GetObjectCommand({ Bucket: bucket, Key: key })
  );
  // response.Body is a readable stream; pipe it directly into the file.
  await pipeline(response.Body, fs.createWriteStream(destination));
};
/**
 * Uploads a single object to S3.
 *
 * @param {string} bucket - The name of the S3 bucket.
 * @param {string} key - The key under which the object will be stored.
 * @param {Buffer|string} body - The content of the file to upload.
 * @throws {Error} If the S3 client failed to initialize or the request fails.
 */
const uploadFileToS3 = async (bucket, key, body) => {
  // Guard: init may have degraded s3 to null; fail with a clear message.
  if (!s3) {
    throw new Error("S3 client is not initialized");
  }
  const { PutObjectCommand } = require("@aws-sdk/client-s3");
  await s3.send(
    new PutObjectCommand({
      Bucket: bucket,
      Key: key,
      Body: body,
    })
  );
};
/**
 * Uploads every regular file directly inside a local folder to an S3 bucket.
 *
 * Note: only files at the top level of `folderPath` are uploaded;
 * subdirectories are skipped (no recursion), matching the original behavior.
 *
 * @param {string} folderPath - The local folder path.
 * @param {string} bucket - The name of the S3 bucket.
 * @param {string} [prefix=""] - Prefix to prepend to each S3 key.
 * @throws {Error} If the S3 client failed to initialize or an upload fails.
 */
const uploadFolderToS3 = async (folderPath, bucket, prefix = "") => {
  // Guard: init may have degraded s3 to null; fail with a clear message.
  if (!s3) {
    throw new Error("S3 client is not initialized");
  }
  const { PutObjectCommand } = require("@aws-sdk/client-s3");
  const files = fs.readdirSync(folderPath);
  for (const file of files) {
    const filePath = path.join(folderPath, file);
    // S3 keys always use forward slashes; plain path.join would produce
    // backslash-separated keys on Windows, corrupting the object names.
    const fileKey = path.posix.join(prefix, file);
    if (fs.statSync(filePath).isFile()) {
      const fileContent = fs.readFileSync(filePath);
      await s3.send(
        new PutObjectCommand({
          Bucket: bucket,
          Key: fileKey,
          Body: fileContent,
        })
      );
      console.log(`Uploaded ${fileKey}`);
    }
  }
};
/**
 * Downloads every object under an S3 key prefix ("folder") to a local directory.
 *
 * Handles ListObjectsV2 pagination, recreates the sub-directory structure
 * locally, streams each object body straight to disk, and skips "folder
 * marker" keys that end with '/'.
 *
 * @param {string} bucketName - S3 bucket name.
 * @param {string} folderPrefix - Key prefix on S3 (e.g. "data/logs/2023" or "data/logs/2023/").
 * @param {string} localDestination - Local directory to write into (created if absent).
 * @throws {Error} If the S3 client failed to initialize or listing fails.
 */
async function downloadS3Folder(bucketName, folderPrefix, localDestination) {
  // Guard: init may have degraded s3 to null; fail with a clear message.
  if (!s3) {
    throw new Error("S3 client is not initialized");
  }
  const { ListObjectsV2Command, GetObjectCommand } = require("@aws-sdk/client-s3");
  // Normalize the prefix: drop a leading slash but keep internal structure.
  const cleanPrefix = folderPrefix.startsWith('/') ? folderPrefix.slice(1) : folderPrefix;
  // Ensure the local destination directory exists.
  if (!fs.existsSync(localDestination)) {
    fs.mkdirSync(localDestination, { recursive: true });
  }
  // BUGFIX: source and destination were previously concatenated with no
  // separator, producing a garbled progress message.
  console.log(`开始下载 s3://${bucketName}/${cleanPrefix} -> ${localDestination}...`);
  let continuationToken = undefined;
  let downloadedCount = 0;
  do {
    // 1. List all objects under the prefix (paginated).
    const listCommand = new ListObjectsV2Command({
      Bucket: bucketName,
      Prefix: cleanPrefix,
      ContinuationToken: continuationToken,
    });
    const listResponse = await s3.send(listCommand);
    const objects = listResponse.Contents || [];
    if (objects.length === 0 && !listResponse.IsTruncated) {
      console.log("未找到任何文件或文件夹为空。");
      return;
    }
    // 2. Download each listed object in turn.
    for (const object of objects) {
      const key = object.Key;
      // Skip objects that represent "empty folders" (keys ending in '/').
      if (key.endsWith('/')) {
        continue;
      }
      // Local path = destination dir + (S3 key minus the prefix).
      const relativePath = key.substring(cleanPrefix.length);
      const localFilePath = path.join(localDestination, relativePath);
      // Recreate any intermediate sub-directories.
      const localDir = path.dirname(localFilePath);
      if (!fs.existsSync(localDir)) {
        fs.mkdirSync(localDir, { recursive: true });
      }
      // 3. Stream the object body straight to disk (memory-friendly for large files).
      try {
        const getCommand = new GetObjectCommand({
          Bucket: bucketName,
          Key: key,
        });
        const response = await s3.send(getCommand);
        // response.Body is a ReadableStream; pipe it into the local file.
        await pipeline(response.Body, fs.createWriteStream(localFilePath));
        downloadedCount++;
        console.log(`[已下载] ${key} -> ${localFilePath}`);
      } catch (err) {
        // A single failed object must not abort the whole folder download.
        console.error(`下载失败 ${key}:`, err.message);
      }
    }
    // Advance to the next page of results, if any.
    continuationToken = listResponse.IsTruncated ? listResponse.NextContinuationToken : undefined;
  } while (continuationToken);
  console.log(`\n完成!共下载 ${downloadedCount} 个文件。`);
}
module.exports = { s3, downloadFileFromS3, uploadFileToS3, uploadFolderToS3, downloadS3Folder }; // Public API: the shared S3 client plus file/folder upload & download helpers
// Example usage - commented out to prevent auto-execution:
/*
(async () => {
const res = await s3.send(new ListObjectsV2Command({ Bucket: "test" }));
res.Contents?.forEach((obj) => console.log(`${obj.Key} (${obj.Size} bytes)`));
// Example call to the download function
// await downloadFileFromS3("test", "AUTHORS", "downloaded.txt");
// Example call to the upload function
// const fs = require("fs");
// const fileContent = fs.readFileSync("downloaded.txt");
// await uploadFileToS3("test", "uploaded.txt", fileContent);
// --- Folder download example ---
const BUCKET_NAME = "test";
const S3_FOLDER = "remote-prefix/"; // Folder path (key prefix) on S3
const LOCAL_PATH = "./downloads/assets"; // Local destination path
downloadS3Folder(BUCKET_NAME, S3_FOLDER, LOCAL_PATH)
.catch(console.error);
})();
*/
\ No newline at end of file
{
"name": "@youdatasum/client-s3",
"version": "1.0.0",
"description": "S3 operations client for internal use",
"main": "lib/index.js",
"bin": {
"ss3ops": "./bin/ss3ops.js"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": ["s3", "aws", "client", "internal"],
"author": "Youdatasum",
"license": "MIT",
"repository": {
"type": "git",
"url": "git+ssh://git@git.youdatasum.com/ljg/client-s3.git"
},
"files": [
"bin/",
"lib/",
"package.json",
"README.md"
],
"publishConfig": {
"registry": "https://git.youdatasum.com/api/v4/projects/<project_id>/packages/npm/"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.0.0",
"@smithy/node-http-handler": "^2.0.0"
},
"devDependencies": {
"jest": "^29.0.0"
},
"engines": {
"node": ">=14.0.0"
}
}
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment