SDK Integration
OSS is fully compatible with AWS S3 SDKs, allowing you to integrate object storage into your applications using familiar AWS SDK patterns.
Configuration Requirements
All AWS SDKs require the following OSS settings:
Parameter | Value
---|---
Endpoint URL | https://<CustomerId>.oss.swiftserve.com
Access Key ID | Your OSS Access Key
Secret Access Key | Your OSS Secret Key
Region | us-east-1
Path Style | True (required)
Python (boto3)
Installation & Setup
pip install boto3
import boto3
from botocore.config import Config
s3_client = boto3.client(
's3',
endpoint_url='https://<CustomerId>.oss.swiftserve.com',
aws_access_key_id='your-access-key',
aws_secret_access_key='your-secret-key',
region_name='us-east-1',
config=Config(s3={'addressing_style': 'path'})
)
Common Operations
# List buckets
buckets = s3_client.list_buckets()['Buckets']
# Create bucket
s3_client.create_bucket(Bucket='my-bucket')
# Upload file
s3_client.upload_file('local.txt', 'my-bucket', 'remote.txt')
# Download file
s3_client.download_file('my-bucket', 'remote.txt', 'local.txt')
# List objects
objects = s3_client.list_objects_v2(Bucket='my-bucket')
# Delete object
s3_client.delete_object(Bucket='my-bucket', Key='file.txt')
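As with S3, list_objects_v2 returns at most 1,000 keys per response, so large buckets need pagination. A minimal sketch, reusing the client and bucket name from above:
# List every object in the bucket, one page at a time
paginator = s3_client.get_paginator('list_objects_v2')
for page in paginator.paginate(Bucket='my-bucket'):
    for obj in page.get('Contents', []):
        print(obj['Key'], obj['Size'])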
Advanced Features
# Presigned URL (1 hour expiry)
url = s3_client.generate_presigned_url(
'get_object',
Params={'Bucket': 'my-bucket', 'Key': 'file.txt'},
ExpiresIn=3600
)
# Upload with metadata
s3_client.upload_file(
'local.txt', 'my-bucket', 'remote.txt',
ExtraArgs={'Metadata': {'author': 'username'}}
)
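Presigned URLs can also authorize uploads. A minimal sketch using the 'put_object' client method with the same bucket and key names as above; the holder of the URL can PUT the object without needing OSS credentials:
# Presigned upload URL (1 hour expiry)
upload_url = s3_client.generate_presigned_url(
    'put_object',
    Params={'Bucket': 'my-bucket', 'Key': 'file.txt'},
    ExpiresIn=3600
)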
Node.js (AWS SDK v3)
Installation & Setup
npm install @aws-sdk/client-s3
import { S3Client } from '@aws-sdk/client-s3';
const s3Client = new S3Client({
endpoint: 'https://<CustomerId>.oss.swiftserve.com',
credentials: {
accessKeyId: 'your-access-key',
secretAccessKey: 'your-secret-key'
},
region: 'us-east-1',
forcePathStyle: true
});
Common Operations
import fs from 'node:fs';
import {
  ListBucketsCommand,
  CreateBucketCommand,
  PutObjectCommand,
  GetObjectCommand
} from '@aws-sdk/client-s3';
// List buckets
const buckets = await s3Client.send(new ListBucketsCommand({}));
// Create bucket
await s3Client.send(new CreateBucketCommand({ Bucket: 'my-bucket' }));
// Upload file
const fileContent = fs.readFileSync('local.txt');
await s3Client.send(new PutObjectCommand({
Bucket: 'my-bucket',
Key: 'remote.txt',
Body: fileContent
}));
// Download file
const response = await s3Client.send(new GetObjectCommand({
Bucket: 'my-bucket',
Key: 'remote.txt'
}));
Java (AWS SDK v2)
Maven Dependency
<dependency>
<groupId>software.amazon.awssdk</groupId>
<artifactId>s3</artifactId>
<version>2.20.0</version>
</dependency>
Setup & Usage
import java.io.File;
import java.net.URI;
import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.core.sync.RequestBody;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.S3Configuration;
import software.amazon.awssdk.services.s3.model.CreateBucketRequest;
import software.amazon.awssdk.services.s3.model.PutObjectRequest;
S3Client s3Client = S3Client.builder()
.credentialsProvider(StaticCredentialsProvider.create(
AwsBasicCredentials.create("access-key", "secret-key")))
.endpointOverride(URI.create("https://<CustomerId>.oss.swiftserve.com"))
.region(Region.US_EAST_1)
.serviceConfiguration(S3Configuration.builder()
.pathStyleAccessEnabled(true).build())
.build();
// Create bucket
s3Client.createBucket(CreateBucketRequest.builder()
.bucket("my-bucket").build());
// Upload file
s3Client.putObject(PutObjectRequest.builder()
.bucket("my-bucket").key("remote.txt").build(),
RequestBody.fromFile(new File("local.txt")));
.NET (AWS SDK)
NuGet Package
<PackageReference Include="AWSSDK.S3" Version="3.7.0" />
Setup & Usage
using Amazon.S3;
using Amazon.S3.Model;
var config = new AmazonS3Config
{
    // ServiceURL and RegionEndpoint are mutually exclusive in the .NET SDK,
    // so set the signing region with AuthenticationRegion when overriding the endpoint
    ServiceURL = "https://<CustomerId>.oss.swiftserve.com",
    ForcePathStyle = true,
    AuthenticationRegion = "us-east-1"
};
var s3Client = new AmazonS3Client("access-key", "secret-key", config);
// Create bucket
await s3Client.PutBucketAsync(new PutBucketRequest
{
BucketName = "my-bucket"
});
// Upload file
await s3Client.PutObjectAsync(new PutObjectRequest
{
BucketName = "my-bucket",
Key = "remote.txt",
FilePath = "local.txt"
});
Go (AWS SDK v2)
Installation
go get github.com/aws/aws-sdk-go-v2
go get github.com/aws/aws-sdk-go-v2/service/s3
go get github.com/aws/aws-sdk-go-v2/credentials
Setup & Usage
import (
    "context"
    "log"
    "strings"

    "github.com/aws/aws-sdk-go-v2/aws"
    "github.com/aws/aws-sdk-go-v2/credentials"
    "github.com/aws/aws-sdk-go-v2/service/s3"
)
cfg := aws.Config{
Region: "us-east-1",
Credentials: credentials.NewStaticCredentialsProvider(
"access-key", "secret-key", ""),
}
client := s3.NewFromConfig(cfg, func(o *s3.Options) {
o.BaseEndpoint = aws.String("https://<CustomerId>.oss.swiftserve.com")
o.UsePathStyle = true
})
// Create bucket
_, err := client.CreateBucket(context.TODO(), &s3.CreateBucketInput{
    Bucket: aws.String("my-bucket"),
})
if err != nil {
    log.Fatal(err)
}
// Upload file
_, err = client.PutObject(context.TODO(), &s3.PutObjectInput{
    Bucket: aws.String("my-bucket"),
    Key:    aws.String("remote.txt"),
    Body:   strings.NewReader("file content"),
})
if err != nil {
    log.Fatal(err)
}
Best Practices
Environment Configuration
# Use environment variables for credentials
import os
s3_client = boto3.client(
's3',
endpoint_url=f"https://{os.getenv('OSS_CUSTOMER_ID')}.oss.swiftserve.com",
aws_access_key_id=os.getenv('OSS_ACCESS_KEY'),
aws_secret_access_key=os.getenv('OSS_SECRET_KEY'),
region_name='us-east-1',
config=Config(s3={'addressing_style': 'path'})
)
Error Handling
from botocore.exceptions import ClientError
try:
s3_client.upload_file('local.txt', 'my-bucket', 'remote.txt')
except ClientError as e:
if e.response['Error']['Code'] == 'NoSuchBucket':
print("Bucket does not exist")
else:
print(f"Upload failed: {e}")
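A related pattern, sketched here with the same client and names as above, is checking whether an object exists before acting on it; head_object raises a ClientError whose error code is '404' when the key is missing:
def object_exists(bucket, key):
    """Return True if the object exists, False if OSS reports 404."""
    try:
        s3_client.head_object(Bucket=bucket, Key=key)
        return True
    except ClientError as e:
        if e.response['Error']['Code'] == '404':
            return False
        raise  # propagate anything other than "not found"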
Performance Optimization
# Configure retries and connection pooling for better performance
from botocore.config import Config
config = Config(
    retries={'max_attempts': 3},
    max_pool_connections=50,
    s3={'addressing_style': 'path'}
)
# Pass config= together with the endpoint and credentials shown in the setup section
s3_client = boto3.client('s3', config=config)
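For large objects, boto3's managed transfer settings can also be tuned. The sketch below assumes OSS supports S3 multipart uploads; the threshold, part size, and concurrency values are illustrative, not OSS-specific recommendations:
from boto3.s3.transfer import TransferConfig

# Switch to multipart uploads above 64 MiB, using 16 MiB parts and 10 parallel threads
transfer_config = TransferConfig(
    multipart_threshold=64 * 1024 * 1024,
    multipart_chunksize=16 * 1024 * 1024,
    max_concurrency=10
)
s3_client.upload_file('large.bin', 'my-bucket', 'large.bin', Config=transfer_config)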
Security Guidelines
- Never hardcode credentials in source code
- Use environment variables or credential files (see the sketch after this list)
- Implement proper error handling without exposing sensitive data
- Use HTTPS endpoints for all connections
- Rotate access keys regularly
- Monitor API usage for unusual patterns
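As an example of the credential-file approach, boto3 can read a named profile from ~/.aws/credentials; the profile name 'oss' below is illustrative:
import boto3
from botocore.config import Config

# Reads aws_access_key_id and aws_secret_access_key from the [oss] profile
session = boto3.Session(profile_name='oss')
s3_client = session.client(
    's3',
    endpoint_url='https://<CustomerId>.oss.swiftserve.com',
    region_name='us-east-1',
    config=Config(s3={'addressing_style': 'path'})
)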
For command-line operations, see AWS CLI. For GUI management, see S3Browser.