Upload file to S3: make uploaded files public and allow configuring the destination folder

New configuration options:

- `s3folder`: (string) folder inside the bucket where uploaded files are saved
- `s3publicFiles`: (boolean) whether to send the ACL parameter so uploaded files are publicly readable

Signed-off-by: Lautaro Alvarez <lautarolalvarez@gmail.com>
parent `8643d9cc99` · commit `3585dc9ee6`

4 changed files with 18 additions and 11 deletions
@@ -251,14 +251,16 @@ you don't have to use either of these.

### Amazon S3

The commit replaces the Amazon S3 table in the configuration docs with the version below; the `s3folder` and `s3publicFiles` rows are new.

| config file     | environment                | **default** and example value                                                                                      | description                                                                                                                          |
| --------------- | -------------------------- | ------------------------------------------------------------------------------------------------------------------ | -------------------------------------------------------------------------------------------------------------------------------------- |
| `s3`            |                            | `{ "accessKeyId": "YOUR_S3_ACCESS_KEY_ID", "secretAccessKey": "YOUR_S3_ACCESS_KEY", "region": "YOUR_S3_REGION" }` | When `imageuploadtype` is set to `s3`, you also need to set up this key; see our [S3 Image Upload Guide](guides/s3-image-upload.md) |
|                 | `CMD_S3_ACCESS_KEY_ID`     | **no default**                                                                                                     | AWS access key id                                                                                                                    |
|                 | `CMD_S3_SECRET_ACCESS_KEY` | **no default**                                                                                                     | AWS secret key                                                                                                                       |
|                 | `CMD_S3_REGION`            | **no default**, `ap-northeast-1`                                                                                   | AWS S3 region                                                                                                                        |
| `s3bucket`      | `CMD_S3_BUCKET`            | **no default**                                                                                                     | AWS S3 bucket name                                                                                                                   |
|                 | `CMD_S3_ENDPOINT`          | **no default**                                                                                                     | S3 API endpoint if you don't use AWS                                                                                                 |
| `s3folder`      | `CMD_S3_FOLDER`            | `uploads`                                                                                                          | Folder inside the bucket where uploaded files are saved                                                                              |
| `s3publicFiles` | `CMD_S3_PUBLIC_FILES`      | `false`                                                                                                            | If set to `true`, files uploaded to S3 storage are given public-read permissions                                                    |

(The Azure Blob Storage section that follows in the docs appears only as unchanged context in this diff.)
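The table above covers both the config-file keys and their environment-variable equivalents. As a rough illustration only (the `config` name, credentials and bucket name are placeholders, and any surrounding configuration structure is omitted), the S3-related settings could end up looking like this once loaded:

```javascript
// Illustration only: the S3-related settings from the table above as a plain
// object, with placeholder credentials and bucket name.
const config = {
  s3: {
    accessKeyId: 'YOUR_S3_ACCESS_KEY_ID',
    secretAccessKey: 'YOUR_S3_ACCESS_KEY',
    region: 'ap-northeast-1'
  },
  s3bucket: 'my-hedgedoc-uploads', // CMD_S3_BUCKET
  s3folder: 'uploads',             // CMD_S3_FOLDER, defaults to 'uploads'
  s3publicFiles: false             // CMD_S3_PUBLIC_FILES, defaults to false
}
```

The two defaults shown in the comments match the values added to the defaults hunk below (`'uploads'` and `false`).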
In the default configuration values, `s3bucket` moves next to the other S3 settings and the two new options receive their defaults:

@@ -76,6 +76,9 @@ module.exports = {
    secretAccessKey: undefined,
    region: undefined
  },
+ s3bucket: undefined,
+ s3folder: 'uploads',
+ s3publicFiles: false,
  minio: {
    accessKey: undefined,
    secretKey: undefined,

@@ -83,7 +86,6 @@ module.exports = {
    secure: true,
    port: 9000
  },
- s3bucket: undefined,
  azure: {
    connectionString: undefined,
    container: undefined
In the environment-variable mapping, the same keys are wired to `CMD_S3_BUCKET`, `CMD_S3_FOLDER` and `CMD_S3_PUBLIC_FILES`, with `s3bucket` likewise moved next to the other S3 settings:

@@ -57,6 +57,9 @@ module.exports = {
    region: process.env.CMD_S3_REGION,
    endpoint: process.env.CMD_S3_ENDPOINT
  },
+ s3bucket: process.env.CMD_S3_BUCKET,
+ s3folder: process.env.CMD_S3_FOLDER,
+ s3publicFiles: process.env.CMD_S3_PUBLIC_FILES,
  minio: {
    accessKey: process.env.CMD_MINIO_ACCESS_KEY,
    secretKey: process.env.CMD_MINIO_SECRET_KEY,

@@ -67,7 +70,6 @@ module.exports = {
  lutim: {
    url: process.env.CMD_LUTIM_URL
  },
- s3bucket: process.env.CMD_S3_BUCKET,
  azure: {
    connectionString: process.env.CMD_AZURE_CONNECTION_STRING,
    container: process.env.CMD_AZURE_CONTAINER
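One general caveat when mapping environment variables such as `CMD_S3_PUBLIC_FILES`: values read from `process.env` are always strings, so a boolean-like flag has to be interpreted explicitly. A minimal sketch of one way to normalize such a flag (purely illustrative; `envToBoolean` is a hypothetical helper, not part of this commit):

```javascript
// Illustration only: coerce a string environment variable into a boolean.
// Anything other than the literal string 'true' is treated as false here.
function envToBoolean (value, fallback) {
  if (value === undefined) return fallback
  return value === 'true'
}

// e.g. CMD_S3_PUBLIC_FILES=true -> true; unset or any other value -> false
const s3publicFiles = envToBoolean(process.env.CMD_S3_PUBLIC_FILES, false)
console.log(s3publicFiles)
```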
In the S3 upload handler, the object key is now prefixed with the configured folder instead of the hard-coded `uploads`, and a `public-read` ACL is attached only when public files are enabled:

@@ -28,12 +28,13 @@ exports.uploadImage = function (imagePath, callback) {
  }
  const params = {
    Bucket: config.s3bucket,
-   Key: path.join('uploads', path.basename(imagePath)),
+   Key: path.join(config.s3folder, path.basename(imagePath)),
    Body: buffer
  }

  const mimeType = getImageMimeType(imagePath)
  if (mimeType) { params.ContentType = mimeType }
+ if (config.s3publicFiles) { params.ACL = 'public-read' }

  logger.debug(`S3 object parameters: ${JSON.stringify(params)}`)
  s3.putObject(params, function (err, data) {
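For context, here is a self-contained sketch of the same upload logic, assuming the AWS SDK for JavaScript v2 and placeholder configuration values; it is an illustration of the technique, not HedgeDoc's actual module:

```javascript
// Standalone illustration only, assuming aws-sdk v2; the credentials, bucket,
// folder and public-files flag below are placeholders.
const fs = require('fs')
const path = require('path')
const AWS = require('aws-sdk')

const config = {
  s3: { accessKeyId: 'KEY', secretAccessKey: 'SECRET', region: 'ap-northeast-1' },
  s3bucket: 'my-hedgedoc-uploads',
  s3folder: 'uploads',
  s3publicFiles: true
}

const s3 = new AWS.S3(config.s3)

function uploadImage (imagePath, callback) {
  const params = {
    Bucket: config.s3bucket,
    // The s3folder option replaces the previously hard-coded 'uploads' prefix.
    Key: path.join(config.s3folder, path.basename(imagePath)),
    Body: fs.readFileSync(imagePath)
  }
  // Attach the ACL only when public files are requested, so the default
  // behaviour (private objects) stays unchanged.
  if (config.s3publicFiles) { params.ACL = 'public-read' }

  s3.putObject(params, function (err, data) {
    if (err) return callback(err)
    callback(null, params.Key)
  })
}
```

The sketch returns only the object key rather than a URL, to avoid assuming a particular public URL scheme for the bucket.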