In this tutorial, we will show you how to replace Supabase Storage with AWS S3 for handling attachments in the React Native To-Do List demo app.
The AWS credentials should never be exposed directly on the client, since that could give a user access to the entire S3 bucket. For this tutorial we have therefore decided to use the following workflow:
1. The client invokes a Supabase Edge Function, which holds the AWS credentials as server-side secrets.
2. For uploads and downloads, the Edge Function returns a pre-signed S3 URL, and the client transfers the file directly to or from that URL.
3. For deletes, the Edge Function removes the object from the bucket on the client's behalf.
The following updates to the React Native To-Do List demo app are therefore required:
1. Create three Supabase Edge Functions (s3-upload, s3-download and s3-delete) that perform the S3 operations.
2. Add an AWSStorageAdapter storage adapter that invokes these Edge Functions.
3. Update system.ts to use the new adapter, and replace references to AppConfig.supabaseBucket with the S3 bucket name.
The following prerequisites are required to complete this tutorial:
- The React Native To-Do List demo app set up and running.
- An AWS account. If you do not have an AWS account, you can create one here.
To enable attachment storage using AWS S3, set up an S3 bucket by following these steps:
Create an S3 Bucket
1. In the AWS Management Console, navigate to S3 and click Create bucket.
2. Under Object Ownership, set ACLs disabled and ensure the bucket is private.
Configure Permissions
Go to the Permissions tab and set up the following:
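Native mobile apps are not subject to CORS, but if you also run the demo on web, the bucket needs a CORS configuration that allows the pre-signed GET and PUT requests made by the app. A minimal sketch, with a wildcard origin that you would tighten for production:
[
  {
    "AllowedHeaders": ["*"],
    "AllowedMethods": ["GET", "PUT"],
    "AllowedOrigins": ["*"],
    "ExposeHeaders": []
  }
]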
Create an IAM User
Create an IAM user that the Supabase Edge Functions will use to access the bucket, and generate an access key ID and secret access key for it; these are needed in the next step.
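The Edge Functions only need to create pre-signed upload and download URLs and to delete objects, so the user's permissions can be scoped to just those actions. A minimal sketch of such a policy, assuming your bucket is named your-bucket-name:
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Effect": "Allow",
      "Action": ["s3:PutObject", "s3:GetObject", "s3:DeleteObject"],
      "Resource": "arn:aws:s3:::your-bucket-name/*"
    }
  ]
}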
We need to create three Supabase Edge Functions to handle the S3 operations:
- s3-upload: returns a pre-signed URL for uploading a file
- s3-download: returns a pre-signed URL for downloading a file
- s3-delete: deletes a file from the S3 bucket
Before we create the Edge Functions, we need to set up the environment variables for the AWS S3 credentials. Create a .env file in the root of your Supabase project and update the following values with the AWS S3 configuration created in Step 1:
AWS_ACCESS_KEY_ID=***
AWS_SECRET_ACCESS_KEY=***
AWS_S3_REGION=#region
AWS_S3_BUCKET_NAME=#bucket_name
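Deployed Edge Functions do not read your local .env file, so these values also need to be set as secrets on your Supabase project. Assuming the .env file is in the location created above, the Supabase CLI can push them:
supabase secrets set --env-file .env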
For more information on getting started with a Supabase Edge Function, see the Supabase Getting Started Guide.
Security Note
The filename specified in each edge function request can pose security risks, such as enabling a user to overwrite another user’s files by using the same filename. To mitigate this, a common approach is to generate a random prefix or directory for each file. While it’s likely fine to omit this safeguard in the demo — since users can already read and delete any file — this should be addressed in a production environment.
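As a sketch of that safeguard, the s3-upload function could namespace the object key with a random prefix before signing the URL. The key would then also need to be returned to the client and stored with the attachment record so that downloads and deletes reference the same object:
// Hypothetical hardening for the s3-upload function: avoid key collisions between users.
const objectKey = `${crypto.randomUUID()}/${fileName}`;
const command = new PutObjectCommand({
  Bucket: AWS_BUCKET_NAME,
  Key: objectKey, // instead of using the client-supplied fileName directly
  ContentType: mediaType
});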
Create the s3-upload Edge Function by running the following in your Supabase project:
supabase functions new s3-upload
Replace the contents of the generated index.ts with the following:
import { PutObjectCommand, S3Client } from "npm:@aws-sdk/client-s3";
import { getSignedUrl } from "npm:@aws-sdk/s3-request-presigner";
import "jsr:@supabase/functions-js/edge-runtime.d.ts";
const AWS_ACCESS_KEY_ID = Deno.env.get('AWS_ACCESS_KEY_ID')!;
const AWS_SECRET_ACCESS_KEY = Deno.env.get('AWS_SECRET_ACCESS_KEY')!;
const AWS_REGION = Deno.env.get('AWS_S3_REGION')!;
const AWS_BUCKET_NAME = Deno.env.get('AWS_S3_BUCKET_NAME')!;
const accessControlAllowOrigin = "*";
Deno.serve(async (req) => {
if (req.method !== 'POST') {
return new Response(JSON.stringify({ error: 'Only POST requests are allowed' }), {
status: 405,
});
}
const { fileName, mediaType, expiresIn } = await req.json();
if (!fileName || !mediaType) {
return new Response(
JSON.stringify({ error: 'Missing required fields: fileName or mediaType' }),
{ status: 400 }
);
}
try {
const s3Client = new S3Client({
region: AWS_REGION,
credentials: {
accessKeyId: AWS_ACCESS_KEY_ID,
secretAccessKey: AWS_SECRET_ACCESS_KEY
}
});
const expiry = expiresIn || 900;
const command = new PutObjectCommand({
Bucket: AWS_BUCKET_NAME,
Key: fileName,
ContentType: mediaType
});
const uploadUrl = await getSignedUrl(s3Client, command, { expiresIn: expiry })
return new Response(
JSON.stringify({
message: `UploadURL for ${fileName} created successfully.`,
uploadUrl: uploadUrl
}),
{ status: 200, headers: { "Content-Type": "application/json", 'Access-Control-Allow-Origin': accessControlAllowOrigin } }
);
} catch (err) {
return new Response(JSON.stringify({ error: `Error uploading file ${fileName}: ${err}`}), {
headers: { "Content-Type": "application/json" },
status: 500,
});
}
});
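To sanity-check the function before wiring up the client, you can serve it locally and invoke it with curl; the port, anon key and payload below are placeholders for your local Supabase instance:
supabase functions serve s3-upload --env-file .env
curl -X POST 'http://localhost:54321/functions/v1/s3-upload' \
  -H 'Authorization: Bearer <your-anon-key>' \
  -H 'Content-Type: application/json' \
  -d '{"fileName": "test.jpg", "mediaType": "image/jpeg"}'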
Create the s3-download Edge Function by running the following in your Supabase project:
supabase functions new s3-download
Replace the contents of the generated index.ts with the following:
import { GetObjectCommand, S3Client } from "npm:@aws-sdk/client-s3";
import { getSignedUrl } from "npm:@aws-sdk/s3-request-presigner";
import "jsr:@supabase/functions-js/edge-runtime.d.ts";
const AWS_ACCESS_KEY_ID = Deno.env.get('AWS_ACCESS_KEY_ID')!;
const AWS_SECRET_ACCESS_KEY = Deno.env.get('AWS_SECRET_ACCESS_KEY')!;
const AWS_REGION = Deno.env.get('AWS_S3_REGION')!;
const AWS_BUCKET_NAME = Deno.env.get('AWS_S3_BUCKET_NAME')!;
const accessControlAllowOrigin = "*";
Deno.serve(async (req) => {
if (req.method !== 'POST') {
return new Response(JSON.stringify({ error: 'Only POST requests are allowed' }), {
status: 405,
});
}
const { fileName, expiresIn } = await req.json();
if (!fileName) {
return new Response(
JSON.stringify({ error: 'Missing required field: fileName' }),
{ status: 400 }
);
}
try {
const s3Client = new S3Client({
region: AWS_REGION,
credentials: {
accessKeyId: AWS_ACCESS_KEY_ID,
secretAccessKey: AWS_SECRET_ACCESS_KEY
}
});
const expiry = expiresIn || 900;
const command = new GetObjectCommand({
Bucket: AWS_BUCKET_NAME,
Key: fileName
});
const downloadUrl = await getSignedUrl(s3Client, command, { expiresIn: expiry });
return new Response(
JSON.stringify({
message: `DownloadURL for ${fileName} created successfully.`,
downloadUrl: downloadUrl
}),
{ status: 200, headers: { "Content-Type": "application/json", 'Access-Control-Allow-Origin': accessControlAllowOrigin }}
);
} catch (err) {
return new Response(JSON.stringify({ error: `Error downloading file ${fileName}: ${err}`}), {
headers: { "Content-Type": "application/json" },
status: 500,
});
}
});
Create the s3-delete Edge Function by running the following in your Supabase project:
supabase functions new s3-delete
Replace the contents of the generated index.ts with the following:
import { DeleteObjectCommand, S3Client } from "npm:@aws-sdk/client-s3";
import "jsr:@supabase/functions-js/edge-runtime.d.ts";
const AWS_ACCESS_KEY_ID = Deno.env.get('AWS_ACCESS_KEY_ID')!;
const AWS_SECRET_ACCESS_KEY = Deno.env.get('AWS_SECRET_ACCESS_KEY')!;
const AWS_REGION = Deno.env.get('AWS_S3_REGION')!;
const AWS_BUCKET_NAME = Deno.env.get('AWS_S3_BUCKET_NAME')!;
Deno.serve(async (req) => {
if (req.method !== 'POST') {
return new Response(JSON.stringify({ error: 'Only POST requests are allowed' }), {
status: 405,
});
}
const { fileName } = await req.json();
if (!fileName) {
return new Response(
JSON.stringify({ error: 'Missing required field: fileName' }),
{ status: 400 }
);
}
try {
const s3Client = new S3Client({
region: AWS_REGION,
credentials: {
accessKeyId: AWS_ACCESS_KEY_ID,
secretAccessKey: AWS_SECRET_ACCESS_KEY
}
});
const command = new DeleteObjectCommand({
Bucket: AWS_BUCKET_NAME,
Key: fileName
});
await s3Client.send(command);
return new Response(JSON.stringify({ message: `${fileName} deleted successfully from ${AWS_BUCKET_NAME}.` }), {
headers: { "Content-Type": "application/json" },
status: 200,
});
} catch (err) {
return new Response(JSON.stringify({ error: `Error deleting ${fileName} from ${AWS_BUCKET_NAME}: ${err}`}), {
headers: { "Content-Type": "application/json" },
status: 500,
});
}
});
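With all three functions created, deploy them to your Supabase project:
supabase functions deploy s3-upload
supabase functions deploy s3-download
supabase functions deploy s3-delete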
Create an AWSStorageAdapter.ts file in the demos/react-native-supabase-todolist/library/storage directory and add the following contents:
import * as FileSystem from 'expo-file-system';
import { decode as decodeBase64 } from 'base64-arraybuffer';
import { StorageAdapter } from '@powersync/attachments';
import { SupabaseClient } from '@supabase/supabase-js';
interface S3Upload {
message: string;
uploadUrl: string;
}
interface S3Download {
message: string;
downloadUrl: string;
}
interface S3Delete {
message: string;
}
export class AWSStorageAdapter implements StorageAdapter {
constructor(public client: SupabaseClient) {}
async uploadFile(
filename: string,
data: ArrayBuffer,
options?: {
mediaType?: string;
}
): Promise<void> {
const response = await this.client.functions.invoke<S3Upload>('s3-upload', {
body: {
fileName: filename,
mediaType: options?.mediaType
}
});
if (response.error || !response.data) {
throw new Error(`Failed to reach upload edge function, code=${response.error}`);
}
const { uploadUrl } = response.data;
try {
const body = new Uint8Array(data);
const uploadResponse = await fetch(uploadUrl, {
method: "PUT",
headers: {
"Content-Length": body.length.toString(),
"Content-Type": options?.mediaType,
},
body: body,
});
if (!uploadResponse.ok) {
throw new Error(`Upload of ${filename} failed with status ${uploadResponse.status}`);
}
console.log(`File: ${filename} uploaded successfully.`);
} catch (error) {
console.error('Error uploading file:', error);
throw error;
}
}
async downloadFile(filePath: string): Promise<Blob> {
const response = await this.client.functions.invoke<S3Download>('s3-download', {
body: {
fileName: filePath
}
});
if (response.error || !response.data) {
throw new Error(`Failed to reach download edge function, code=${response.error}`);
}
const { downloadUrl } = response.data;
try {
const downloadResponse = await fetch(downloadUrl, {
method: "GET",
});
if (!downloadResponse.ok) {
throw new Error(`Download of ${filePath} failed with status ${downloadResponse.status}`);
}
return await downloadResponse.blob();
} catch (error) {
console.error('Error downloading file:', error);
throw error;
}
}
async deleteFile(uri: string, options?: { filename?: string }): Promise<void> {
if (await this.fileExists(uri)) {
await FileSystem.deleteAsync(uri);
}
const { filename } = options ?? {};
if (!filename) {
return;
}
try {
const response = await this.client.functions.invoke<S3Delete>('s3-delete', {
body: {
fileName: options?.filename,
}
});
if (response.error || !response.data) {
throw new Error(`Failed to reach delete edge function, code=${response.error}`);
}
const { message } = response.data;
console.log(message);
} catch (error) {
console.error(`Error deleting ${filename}:`, error);
}
}
async readFile(
fileURI: string,
options?: { encoding?: FileSystem.EncodingType; mediaType?: string }
): Promise<ArrayBuffer> {
const { encoding = FileSystem.EncodingType.UTF8 } = options ?? {};
const { exists } = await FileSystem.getInfoAsync(fileURI);
if (!exists) {
throw new Error(`File does not exist: ${fileURI}`);
}
const fileContent = await FileSystem.readAsStringAsync(fileURI, options);
if (encoding === FileSystem.EncodingType.Base64) {
return this.base64ToArrayBuffer(fileContent);
}
return this.stringToArrayBuffer(fileContent);
}
async writeFile(
fileURI: string,
base64Data: string,
options?: {
encoding?: FileSystem.EncodingType;
}
): Promise<void> {
const { encoding = FileSystem.EncodingType.UTF8 } = options ?? {};
await FileSystem.writeAsStringAsync(fileURI, base64Data, { encoding });
}
async fileExists(fileURI: string): Promise<boolean> {
const { exists } = await FileSystem.getInfoAsync(fileURI);
return exists;
}
async makeDir(uri: string): Promise<void> {
const { exists } = await FileSystem.getInfoAsync(uri);
if (!exists) {
await FileSystem.makeDirectoryAsync(uri, { intermediates: true });
}
}
async copyFile(sourceUri: string, targetUri: string): Promise<void> {
await FileSystem.copyAsync({ from: sourceUri, to: targetUri });
}
getUserStorageDirectory(): string {
return FileSystem.documentDirectory!;
}
async stringToArrayBuffer(str: string): Promise<ArrayBuffer> {
const encoder = new TextEncoder();
return encoder.encode(str).buffer;
}
/**
* Converts a base64 string to an ArrayBuffer
*/
async base64ToArrayBuffer(base64: string): Promise<ArrayBuffer> {
return decodeBase64(base64);
}
}
The AWSStorageAdapter class implements a storage adapter for AWS S3, allowing file operations (upload, download, delete) with an S3 bucket.
async uploadFile(filename: string, data: ArrayBuffer, options?: { mediaType?: string; }): Promise<void>
Calls the s3-upload Edge Function to get a pre-signed URL for the file, then uploads the file to that URL with an HTTP PUT request:
const response = await this.client.functions.invoke<S3Upload>('s3-upload', {
body: {
fileName: filename,
mediaType: options?.mediaType
}
});
// error handling
const { uploadUrl } = response.data;
const body = new Uint8Array(data);
await fetch(uploadUrl, {
method: "PUT",
headers: {
"Content-Length": body.length.toString(),
"Content-Type": options?.mediaType,
},
body: body,
});
async downloadFile(filePath: string): Promise<Blob>
Calls the s3-download Edge Function to get a pre-signed URL for the file, then fetches the file from that URL and returns it as a Blob:
const response = await this.client.functions.invoke<S3Download>('s3-download', {
body: {
fileName: filePath
}
});
// error handling
const { downloadUrl } = response.data;
const downloadResponse = await fetch(downloadUrl, {
method: "GET",
});
return await downloadResponse.blob();
async deleteFile(uri: string, options?: { filename?: string }): Promise<void>
Two-step deletion process:
1. Deletes the local file if it exists.
2. Calls the s3-delete Edge Function to remove the file from the S3 bucket:
const response = await this.client.functions.invoke<S3Delete>('s3-delete', {
body: {
fileName: options?.filename,
}
});
Update the system.ts file in the demos/react-native-supabase-todolist/library/config directory to use the new AWSStorageAdapter class. The only changes needed are the AWSStorageAdapter import, the type of the storage property, and its initialization in the constructor:
import '@azure/core-asynciterator-polyfill';
import { PowerSyncDatabase, createBaseLogger } from '@powersync/react-native';
import React from 'react';
import { type AttachmentRecord } from '@powersync/attachments';
import { KVStorage } from '../storage/KVStorage';
import { AppConfig } from '../supabase/AppConfig';
import { SupabaseConnector } from '../supabase/SupabaseConnector';
import { AppSchema } from './AppSchema';
import { PhotoAttachmentQueue } from './PhotoAttachmentQueue';
import { AWSStorageAdapter } from '../storage/AWSStorageAdapter';
createBaseLogger().useDefaults();
export class System {
kvStorage: KVStorage;
storage: AWSStorageAdapter;
supabaseConnector: SupabaseConnector;
powersync: PowerSyncDatabase;
attachmentQueue: PhotoAttachmentQueue | undefined = undefined;
constructor() {
this.kvStorage = new KVStorage();
this.supabaseConnector = new SupabaseConnector(this);
this.storage = new AWSStorageAdapter(this.supabaseConnector.client);
this.powersync = new PowerSyncDatabase({
schema: AppSchema,
database: {
dbFilename: 'sqlite.db'
}
});
/**
* The snippet below uses OP-SQLite as the default database adapter.
* You will have to uninstall `@journeyapps/react-native-quick-sqlite` and
* install both `@powersync/op-sqlite` and `@op-engineering/op-sqlite` to use this.
*
* import { OPSqliteOpenFactory } from '@powersync/op-sqlite'; // Add this import
*
* const factory = new OPSqliteOpenFactory({
* dbFilename: 'sqlite.db'
* });
* this.powersync = new PowerSyncDatabase({ database: factory, schema: AppSchema });
*/
if (AppConfig.supabaseBucket) {
this.attachmentQueue = new PhotoAttachmentQueue({
powersync: this.powersync,
storage: this.storage,
// Use this to handle download errors where you can use the attachment
// and/or the exception to decide if you want to retry the download
onDownloadError: async (attachment: AttachmentRecord, exception: any) => {
if (exception.toString() === 'StorageApiError: Object not found') {
return { retry: false };
}
return { retry: true };
}
});
}
}
async init() {
await this.powersync.init();
await this.powersync.connect(this.supabaseConnector);
if (this.attachmentQueue) {
await this.attachmentQueue.init();
}
}
}
export const system = new System();
export const SystemContext = React.createContext(system);
export const useSystem = () => React.useContext(SystemContext);
Ensure that all references to AppConfig.supabaseBucket are replaced with the S3 bucket name in the React Native To-Do List demo app.
The S3 bucket name can be obtained on the client by creating another Supabase Edge Function that returns it. This keeps all S3 configuration on the server.
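A minimal sketch of such a function, using a hypothetical name s3-bucket-name that is not part of the demo:
import "jsr:@supabase/functions-js/edge-runtime.d.ts";

const AWS_BUCKET_NAME = Deno.env.get('AWS_S3_BUCKET_NAME')!;

// Returns the configured bucket name so the client never needs it hard-coded.
Deno.serve(() =>
  new Response(JSON.stringify({ bucketName: AWS_BUCKET_NAME }), {
    status: 200,
    headers: { "Content-Type": "application/json" }
  })
);
On the client, this could be invoked once at startup via this.supabaseConnector.client.functions.invoke('s3-bucket-name') and the result used wherever AppConfig.supabaseBucket was previously referenced.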
You can now run the app and test the attachment upload and download functionality.