FAQ: Using file uploads with 3rd party file hosting services (cloudinary, S3, etc) #220
Replies: 2 comments 9 replies
-
Hey, I managed to get Cloudinary working as best I can by making a plugin that attaches to the collection with the slug `cloudinary_media`. Hope this helps. The only thing that was beyond me was changing the file URL that Payload generates for the upload, which still points at the admin host and ends up broken once the local file is removed. One way to solve the broken URL problem would be to expose a file URL option in the Payload upload UI and use that value instead whenever the user provides it; that would fix it, if I'm not missing anything?! Here is the solution code, with some questions and doubts along the way in the comments (a sketch of one possible URL workaround follows the config files below). My folder structure goes like this:
File - `src/cloudinary/hooks/cloudinaryConfig.ts`

```ts
import { v2 as cloudinary } from 'cloudinary';

// Configure the Cloudinary SDK from environment variables.
cloudinary.config({
  cloud_name: process.env.CLOUDINARY_CLOUD_NAME,
  api_key: process.env.CLOUDINARY_API_KEY,
  api_secret: process.env.CLOUDINARY_API_SECRET,
});

export { cloudinary };
```

File - `src/cloudinary/hooks/CloudinaryMediaHooks.ts`

```ts
import { AfterChangeHook, AfterDeleteHook, BeforeChangeHook } from 'payload/dist/collections/config/types';
import { cloudinary } from './cloudinaryConfig';
import streamifier from 'streamifier';
import { UploadApiResponse, UploadStream } from 'cloudinary';
import path from 'path';
import fs, { promises as Fs } from 'fs';
// Upload a file buffer to Cloudinary using a stream (via streamifier).
const streamUpload = (file: { data: Buffer }, id?: string): Promise<UploadApiResponse> => {
  return new Promise<UploadApiResponse>((resolve, reject) => {
    const options = {
      folder: 'fab-store',
      invalidate: true,
      // When updating an existing image we need the public_id, but not the folder:
      // the folder is already in the URL, and passing it again would create the file
      // in a sub-folder instead of updating it.
      ...(id && { public_id: id, folder: null }),
    };
    const stream: UploadStream = cloudinary.uploader.upload_stream(options, (error, result) => {
      if (result) {
        resolve(result);
      } else {
        reject(error);
      }
    });
    streamifier.createReadStream(file.data).pipe(stream);
  });
};

const beforeChangeHook: BeforeChangeHook = async ({ data, req, operation }) => {
  // Get the file from the request and upload it to Cloudinary.
  // FIXME: the PayloadRequest type declares `file` rather than `files`, so the typing here may need a fix.
  const uploadedFile = req.files.file;
  if (uploadedFile) {
    const result = await streamUpload(uploadedFile, operation === 'update' ? data.cloudPublicId : undefined);
    data.cloudPublicId = result.public_id;
    data.cloudinaryURL = result.secure_url;
  }
  return data;
};
// Check whether a file exists on disk without throwing.
async function exists(filePath: string) {
  try {
    await Fs.access(filePath);
    return true;
  } catch {
    return false;
  }
}

// Delete a local file if it exists, logging any error.
async function deleteFile(filePath: string) {
  const fileExists = await exists(filePath);
  if (fileExists) {
    fs.unlink(filePath, (err) => {
      if (err) {
        console.log(err);
        throw err;
      }
    });
  }
}
const afterChangeHook: AfterChangeHook = ({ doc, operation }) => {
  // Find the file on the server's hard drive by doc.filename and, if it exists, delete it.
  if (doc?.filename) {
    const mainFilePath = path.resolve(__dirname, '../../../cloudinary_media', doc.filename);
    deleteFile(mainFilePath);
  }
  // ...and delete all of its sizes (if any).
  if (doc?.sizes) {
    for (const imageName in doc.sizes) {
      const filePath = path.resolve(__dirname, '../../../cloudinary_media', doc.sizes[imageName].filename);
      deleteFile(filePath);
    }
  }
  // I tried deleting the now-empty directory as well, but it caused errors during
  // update operations, so I left it. If there is a way to do it without errors, let me know.
  return doc;
};
const afterDeleteHook: AfterDeleteHook = ({ doc }) => {
  // Delete the file from Cloudinary using the public_id stored on the doc.
  cloudinary.uploader.destroy(doc.cloudPublicId, function (error, result) {
    console.log(result, error);
  });
  return doc;
};

export { streamUpload, beforeChangeHook, afterChangeHook, afterDeleteHook };
```

Plugin file - `src/cloudinary/cloudinaryPlugin.ts`

```ts
import { Config } from 'payload/config';
import { afterChangeHook, afterDeleteHook, beforeChangeHook } from './hooks/CloudinaryMediaHooks';
const addCloudinary = (incomingConfig: Config): Config => {
  const config: Config = {
    ...incomingConfig,
    collections: incomingConfig.collections.map((collection) => {
      if (collection.slug === 'cloudinary_media') {
        return {
          ...collection,
          hooks: {
            ...collection.hooks,
            beforeChange: [beforeChangeHook],
            afterChange: [afterChangeHook],
            afterDelete: [afterDeleteHook],
          },
          fields: [
            ...collection.fields,
            {
              name: 'cloudPublicId', // This field is needed to delete and update Cloudinary files.
              type: 'text',
              access: {
                // Prevent writing to the field; the hooks are responsible for it instead.
                create: () => false,
                update: () => false,
              },
              admin: {
                position: 'sidebar',
                condition: (data) => Boolean(data?.cloudPublicId),
                readOnly: true,
              },
            },
            {
              name: 'cloudinaryURL',
              type: 'text',
              access: {
                // Prevent writing to the field; the hooks are responsible for it instead.
                create: () => false,
                update: () => false,
              },
              // I don't think we need an afterRead hook here because the value is already
              // set in beforeChangeHook. I hope I'm not missing something, but everything
              // works smoothly for now:
              // hooks: {
              //   afterRead: [
              //     ({ data }) => {
              //       return data.cloudinaryURL;
              //     },
              //   ],
              // },
              admin: {
                position: 'sidebar',
                readOnly: true,
                // Only show the field when it has a value.
                condition: (data) => Boolean(data?.cloudinaryURL),
              },
            },
          ],
        };
      }
      return collection;
    }),
  };
  return config;
};

export default addCloudinary;
```

And finally, the collection file and the field to be used in other collections.

File - `src/collections/CloudinaryMedia.ts`

```ts
import { CollectionConfig } from 'payload/types';
const CloudinaryMedia: CollectionConfig = {
  slug: 'cloudinary_media',
  upload: {
    // The thumbnail image in the admin UI will use Cloudinary instead of the admin host URL.
    adminThumbnail: ({ doc }) => String(doc.cloudinaryURL),
  },
  fields: [
    {
      label: 'Alt Text',
      name: 'altText',
      type: 'text',
      required: true,
    },
  ],
};

export default CloudinaryMedia;
```

And the field in any collection where you want to use it:

```ts
fields: [
  {
    name: 'cloudinary_image',
    label: 'Upload Product Image on Cloudinary',
    type: 'upload',
    relationTo: 'cloudinary_media',
  },
]
```

payload.config.ts needed some updates too.

File - `src/payload.config.ts`

```ts
import { buildConfig } from 'payload/config';
import path from 'path';
import Users from './collections/Users';
import Products from './collections/Products';
import CloudinaryMedia from './collections/CloudinaryMedia';
import addCloudinary from './cloudinary/cloudinaryPlugin';
const CloudinaryMediaHooks = path.resolve(__dirname, 'cloudinary/hooks/CloudinaryMediaHooks');
const mockModulePath = path.resolve(__dirname, 'mocks/emptyObject');
export default buildConfig({
  serverURL: 'http://localhost:3000',
  admin: {
    user: Users.slug,
    webpack: (config) => ({
      ...config,
      resolve: {
        ...config.resolve,
        alias: {
          ...config.resolve.alias,
          // Did this as suggested by the docs for server-side only packages like fs, cloudinary, etc.
          [CloudinaryMediaHooks]: mockModulePath,
        },
      },
    }),
  },
  collections: [Users, Products, CloudinaryMedia],
  plugins: [
    // Add the Cloudinary plugin.
    addCloudinary,
  ],
});
```
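On the broken-URL question above: one possible direction (just a sketch, untested, and not part of the code above) would be to override the generated URL at read time instead of adding a manual URL option. It assumes an `AfterReadHook` type exported from the same path as the other hook types, and reuses the `cloudinaryURL` field set by `beforeChangeHook`:

```ts
import { AfterReadHook } from 'payload/dist/collections/config/types';

// Sketch: on every read, point `url` at Cloudinary instead of the (deleted) local file,
// so the admin UI and API responses use the hosted URL.
const afterReadHook: AfterReadHook = ({ doc }) => {
  if (doc?.cloudinaryURL) {
    doc.url = doc.cloudinaryURL;
  }
  return doc;
};

// Registered in the plugin next to the other hooks, e.g.:
// hooks: {
//   ...collection.hooks,
//   beforeChange: [beforeChangeHook],
//   afterChange: [afterChangeHook],
//   afterDelete: [afterDeleteHook],
//   afterRead: [afterReadHook],
// },
```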
-
Here's my S3 implementation for file uploads.

```ts
import AWS from 'aws-sdk'
import { UploadedFile } from 'express-fileupload'
import { CollectionConfig, CollectionBeforeChangeHook, CollectionAfterDeleteHook } from 'payload/types'
import { IncomingUploadType } from 'payload/dist/uploads/types'
import { APIError } from 'payload/errors'
// Type guard: express-fileupload may give a single file or an array of files.
function isUploadedFile (object: unknown): object is UploadedFile {
  if (typeof object === 'object' && object !== null) {
    return 'data' in object
  }
  return false
}

let instance: AWS.S3 = null

const getCurrentS3Instance = (): AWS.S3 => {
  if (instance === null) {
    throw new APIError("S3 has not been initialized. Ensure you're calling `init()` with your S3 credentials before using these hooks.")
  }
  return instance
}

const options: FileOptions = {
  bucket: null,
  acl: 'private',
}

const getBucketName = (): string => {
  if (options.bucket === null) {
    throw new APIError("Bucket name has not been initialized. Ensure you're calling `init()` with your S3 credentials and file options before using these hooks.")
  }
  return options.bucket
}
export const uploadToS3: CollectionBeforeChangeHook = async ({ data, req }) => {
  if (req?.files?.file) {
    // express-fileupload returns an array when multiple files share the same field name.
    let uploadedFile: UploadedFile
    if (isUploadedFile(req.files.file)) {
      uploadedFile = req.files.file
    } else {
      uploadedFile = req.files.file[0]
    }
    const s3 = getCurrentS3Instance()
    const bucket = getBucketName()
    await s3.putObject({
      Bucket: bucket,
      Key: String(data.filename),
      Body: uploadedFile.data,
      // Note: uploads are always public-read here; the configured `acl` option is not applied.
      ACL: 'public-read',
      ContentType: uploadedFile.mimetype,
    }).promise()
  }
  return data
}

export const deleteFromS3: CollectionAfterDeleteHook = async ({ doc }) => {
  const s3 = getCurrentS3Instance()
  await s3.deleteObject({
    Bucket: getBucketName(),
    Key: String(doc.filename),
  }).promise()
}
type FileOptions = {
  bucket: string;
  acl?: 'private' | 'public-read';
}

type Doc = {
  doc: { s3Url: string }
}

export function init (s3Configuration: AWS.S3.ClientConfiguration, fileOptions: FileOptions): void {
  instance = new AWS.S3(s3Configuration)
  options.bucket = fileOptions.bucket
  if (fileOptions.acl) {
    options.acl = fileOptions.acl
  }
}

function isUploadObject (arg: unknown): arg is IncomingUploadType {
  return typeof arg === 'object'
}
// Wrap a collection config with the S3 hooks and a read-only `s3Url` field.
export function withS3Storage (
  s3Configuration: AWS.S3.ClientConfiguration,
  fileOptions: FileOptions,
  collection: CollectionConfig,
): (getUrl: (filename: string) => string) => CollectionConfig {
  init(s3Configuration, fileOptions)
  return getUrl => {
    collection.fields = [
      ...collection.fields,
      {
        label: 'S3 URL',
        name: 's3Url',
        type: 'text',
        admin: {
          readOnly: true,
        },
        hooks: {
          // Never persist the field; it is computed from the filename on every read.
          beforeChange: [
            (): undefined => undefined,
          ],
          afterRead: [
            ({ data }): string => {
              return getUrl(String(data.filename))
            },
          ],
        },
      },
    ]
    const {
      beforeChange = [],
      afterDelete = [],
      ...rest
    } = collection.hooks || {}
    collection.hooks = {
      beforeChange: [
        uploadToS3,
        ...beforeChange,
      ],
      afterDelete: [
        deleteFromS3,
        ...afterDelete,
      ],
      ...rest,
    }
    if (isUploadObject(collection.upload)) {
      collection.upload.adminThumbnail = ({ doc }: Doc) => doc.s3Url
    }
    return collection
  }
}
```

The API feels a little weird; I'd appreciate some suggestions on how to improve it.

```ts
const MediaImage: CollectionConfig = {
  // ...
}

// Initialises the collection with the hooks and the fields.
const withS3 = withS3Storage(
  {
    endpoint: (new AWS.Endpoint(`${process.env.SPACES_REGION}.digitaloceanspaces.com`)),
    accessKeyId: process.env.SPACES_KEY,
    secretAccessKey: process.env.SPACES_SECRET,
  },
  {
    bucket: process.env.SPACES_NAME,
  },
  MediaImage,
)

// getUrl returns the CDN path for a given filename.
const s3Collection = withS3(filename => `https://${process.env.SPACES_NAME}.${process.env.SPACES_REGION}.cdn.digitaloceanspaces.com/${filename}`)

export default s3Collection
```
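One possible direction, purely as a sketch built on top of the code above (the `withS3StorageAlt` name and options shape are hypothetical, not part of the implementation): collapse the curried call into a single options object so the credentials, bucket, and URL builder are configured in one place.

```ts
// Hypothetical wrapper: one options object instead of the curried
// (s3Configuration, fileOptions, collection)(getUrl) call.
type WithS3StorageOptions = {
  s3: AWS.S3.ClientConfiguration
  file: FileOptions
  getUrl: (filename: string) => string
}

const withS3StorageAlt = (opts: WithS3StorageOptions) =>
  (collection: CollectionConfig): CollectionConfig =>
    withS3Storage(opts.s3, opts.file, collection)(opts.getUrl)

// Usage (same behaviour as above):
// export default withS3StorageAlt({
//   s3: { endpoint: ..., accessKeyId: ..., secretAccessKey: ... },
//   file: { bucket: process.env.SPACES_NAME },
//   getUrl: filename => `https://cdn.example.com/${filename}`,
// })(MediaImage)
```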
-
Many have asked how to integrate Payload's upload feature with various file hosting services. Rather than building a custom field, we recommend that you use the built-in Payload upload collection for the admin UI and add hooks that handle the integration. Here is a stubbed-out example of a collection that you can use with Cloudinary, S3, or other vendors; the answer to this discussion will include the complete code. In the future, each hosting option will get a plugin to make it as turnkey and customizable as any other field type.
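A minimal sketch of the idea (the `uploadToHost` and `deleteFromHost` functions below are placeholders for whichever vendor SDK calls you use; the slug and field names are illustrative):

```ts
import { CollectionConfig } from 'payload/types';

// Placeholders: replace these with your Cloudinary / S3 / other vendor calls.
const uploadToHost = async (file: { data: Buffer }): Promise<{ url: string }> => {
  throw new Error('replace with your hosting service upload call');
};
const deleteFromHost = async (filename: string): Promise<void> => {
  throw new Error('replace with your hosting service delete call');
};

const Media: CollectionConfig = {
  slug: 'media',
  upload: true,
  fields: [
    {
      name: 'hostedURL',
      type: 'text',
      admin: { readOnly: true },
    },
  ],
  hooks: {
    // Push the incoming file to the hosting service and store the returned URL on the doc.
    beforeChange: [
      async ({ data, req }) => {
        const file = req?.files?.file;
        if (file) {
          const { url } = await uploadToHost(file);
          data.hostedURL = url;
        }
        return data;
      },
    ],
    // Remove the file from the hosting service when the doc is deleted.
    afterDelete: [
      async ({ doc }) => {
        await deleteFromHost(String(doc.filename));
        return doc;
      },
    ],
  },
};

export default Media;
```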