bedrock.external.s3

You can use s3 as a storage backend for bedrock. By default, it will use the bucket name that you've configured (with environment variable <YOUR_APP>_STORAGE_BUCKET_NAME) but you can also specify another bucket name when saving a file.

Example - A Model that has an associated file

Let's consider that our application has images of Planets and each Planet model has a reference to that image file. So, the API receives a file as a base64 string, saves it to S3 and stores a reference to the s3 file in the Planets table in the DB (so that it can be accessed later).

Model

from sqlalchemy import Column, String
from sqlalchemy.dialects.postgresql import UUID, ARRAY
from bedrock.db.model_helper import Base, ModelHelper

class Planet(Base, ModelHelper):
    __tablename__ = "bedrock_planets"

    name = Column(String(255), nullable=False)
    image_reference = Column(String(2048), nullable=True)  # image reference in S3 (i.e. a path)

    @classmethod
    def has_custom_schema(cls):
        return True

    @classmethod
    def as_schema(cls):
        return """Planet:
      type: object
      properties:
        uuid:
          type: string
          nullable: false
        name:
          type: string
          nullable: false
        imageReference:
          type: string
          nullable: true
        imageUrl:
          type: string
          nullable: true
        updatedAt:
          type: string
          nullable: true
        createdAt:
          type: string
          nullable: false
    """

    def as_json(self):
        # get a pre-signed temporary URL to the image saved in s3
        image_url = get_s3_file_pre_signed_url(self.image_reference) if self.image_reference else None
        return {
            **super().as_json(),
            "imageUrl": image_url
        }

Endpoint

from bedrock.endpoint.endpoint import Endpoint
from bedrock.external.s3 import save_file
from model.planet import Planet


class Planets(Endpoint):
    __body_schema_name__ = "PlanetPayload"

    @classmethod
    def has_custom_body_schema(cls):
        return True

    @classmethod
    def get_body_schema(cls):
        return """PlanetPayload:
    type: object
    properties:
      name:
        type: string
        nullable: false
      imageReference:
        type: string
        nullable: true
        description: The path and name of the image file
      base64Image:
        type: string
        nullable: true
        description: The image file encoded in base64
"""

    def __init__(self):
        super().__init__("/planets/", related_model=Planet)

    # ...

    def post_global(self, event):
        # ...
        image_name = event["body"]["imageReference"]  # Instead, you may want to auto-generate a file name
        base64_image = event["body"]["base64Image"]

        # Save the file to S3 under the specified path `image_name`.
        save_file(image_name, base64_image)

        # Create a new planet from the request body, because request body includes the required `imageReference` field.
        # Then save it to the DB.
        # (maybe you'd be calling `post_global_generic` instead)
        planet = Planet.from_json(event["body"])
        planet.save()
        # ...
  1"""
  2You can use s3 as a storage backend for bedrock.
  3By default, it will use the bucket name that you've configured (with environment variable `<YOUR_APP>_STORAGE_BUCKET_NAME`)
  4but you can also specify another bucket name when saving a file.
  5
  6# Example - A Model that has an associated file
  7Let's consider that our application has images of Planets and each Planet model has a reference to that image file.
  8So, the API receives a file as a base64 string, saves it to S3 and stores a reference to the s3 file in the Planets
  9table in the DB (so that it can be accessed later).
 10
 11## Model
 12```python
 13from sqlalchemy import Column, String
 14from sqlalchemy.dialects.postgresql import UUID, ARRAY
 15from bedrock.db.model_helper import Base, ModelHelper
 16
 17class Planet(Base, ModelHelper):
 18    __tablename__ = "bedrock_planets"
 19
 20    name = Column(String(255), nullable=False)
 21    image_reference = Column(String(2048), nullable=True)  # image reference in S3 (i.e. a path)
 22
 23    @classmethod
 24    def has_custom_schema(cls):
 25        return True
 26
 27    @classmethod
 28    def as_schema(cls):
 29        return \"""Planet:
 30      type: object
 31      properties:
 32        uuid:
 33          type: string
 34          nullable: false
 35        name:
 36          type: string
 37          nullable: false
 38        imageReference:
 39          type: string
 40          nullable: true
 41        imageUrl:
 42          type: string
 43          nullable: true
 44        updatedAt:
 45          type: string
 46          nullable: true
 47        createdAt:
 48          type: string
 49          nullable: false
 50    \"""
 51
 52    def as_json(self):
 53        # get a pre-signed temporary URL to the image saved in s3
 54        image_url = get_s3_file_pre_signed_url(self.image_reference) if self.image_reference else None
 55        return {
 56            **super().as_json(),
 57            "imageUrl": image_url
 58        }
 59```
 60
 61## Endpoint
 62```python
 63from bedrock.endpoint.endpoint import Endpoint
 64from bedrock.external.s3 import save_file
 65from model.planet import Planet
 66
 67
 68class Planets(Endpoint):
 69    __body_schema_name__ = "PlanetPayload"
 70
 71    @classmethod
 72    def has_custom_body_schema(cls):
 73        return True
 74
 75    @classmethod
 76    def get_body_schema(cls):
 77        return \"""PlanetPayload:
 78    type: object
 79    properties:
 80      name:
 81        type: string
 82        nullable: false
 83      imageReference:
 84        type: string
 85        nullable: true
 86        description: The path and name of the image file
 87      base64Image:
 88        type: string
 89        nullable: true
 90        description: The image file encoded in base64
 91\"""
 92
 93    def __init__(self):
 94        super().__init__("/planets/", related_model=Planet)
 95
 96    # ...
 97
 98    def post_global(self, event):
 99        # ...
100        image_name = event["body"]["imageReference"]  # Instead, you may want to auto-generate a file name
101        base64_image = event["body"]["base64Image"]
102
103        # Save the file to S3 under the specified path `image_name`.
104        save_file(image_name, base64_image)
105
106        # Create a new planet from the request body, because request body includes the required `imageReference` field.
107        # Then save it to the DB.
108        # (maybe you'd be calling `post_global_generic` instead)
109        planet = Planet.from_json(event["body"])
110        planet.save()
111        # ...
112```
113"""
114import base64
115import io
116from bedrock.config import get_config_params
117from bedrock.log import log_config
118from bedrock.external.aws import make_client
119
# Module-level logger for this backend; log_config from bedrock.log presumably tags
# entries with the "s3" name — TODO confirm against bedrock.log.
log = log_config("s3")
121
122
def save_file(target_file_path: str, base64_string: str, content_type: str = None, bucket_name: str = None,
              region: str = None) -> bool:
    """
    Saves a base64-encoded file to the specified bucket.
    :param target_file_path: File path (key) to save to
    :param base64_string: Base64 string of the file. May also be a data URL
        ("data:<mime>[;base64],<payload>").
    :param content_type: Content type of the file (defaults to the detected content type)
    :param bucket_name: Bucket name to save to (defaults to the configured bucket)
    :param region: The AWS region for the bucket (defaults to the configured region)
    :return: True if the upload succeeded, False otherwise
    """
    _content_type = content_type if content_type is not None else get_file_content_type(target_file_path, base64_string)
    # A data URL always starts with "data:"; keep only the payload after the first
    # comma. A prefix test + single split is safer than the previous substring test
    # with an unbounded split.
    if base64_string.startswith("data:"):
        _base64_string = base64_string.split(",", 1)[1]
    else:
        _base64_string = base64_string
    return _save_file(target_file_path, _base64_string, _content_type, bucket_name, region)
137
138
def _save_file(target_file_path: str, base64_string: str, file_content_type, bucket_name: str = None,
               region: str = None) -> bool:
    """
    Saves a base64-encoded file to the specified bucket.
    :param target_file_path: File path (key) to save to
    :param base64_string: Base64 string (not a data URL!)
    :param file_content_type: Content type stored as the S3 object's ContentType
    :param bucket_name: Bucket name to save to (defaults to the configured bucket)
    :param region: The AWS region for the bucket (defaults to the configured region)
    :return: True if the upload succeeded, False otherwise
    """
    log.debug(f"Saving file {target_file_path} to S3 bucket {bucket_name}")
    s3_client, bucket = _get_s3_client_and_bucket(bucket_name, region)
    try:
        s3_client.upload_fileobj(
            Fileobj=io.BytesIO(base64.b64decode(base64_string)),
            Bucket=bucket,
            Key=target_file_path,
            ExtraArgs={"ContentType": file_content_type})
        return True
    except Exception as e:
        # Best-effort API: callers only get a boolean, so log the cause (which the
        # previous log line silently dropped) instead of raising.
        log.error(f"Unable to upload file {target_file_path} to S3 bucket {bucket}: {e}")
        return False
164
165
def upload_s3_file(source_file_path: str, bucket_name: str, s3_key: str, region: str = None) -> bool:
    """
    Uploads a local file to S3.
    :param source_file_path: The local source path for the file
    :param bucket_name: The name of the bucket to save to (a falsy value falls back to the configured bucket)
    :param s3_key: The key to save the file as (i.e. path in the bucket)
    :param region: The AWS region for the bucket (defaults to the configured region)
    :return: True if the upload succeeded, False otherwise
    """
    s3_client, bucket = _get_s3_client_and_bucket(bucket_name, region)
    try:
        s3_client.upload_file(source_file_path, bucket, s3_key)
        return True
    except Exception as e:
        # Include the cause, which the original log line dropped.
        log.error(f"Unable to send file {source_file_path} to S3 bucket {bucket} with key {s3_key}: {e}")
        return False
182
183
def delete_s3_file(file_path: str, bucket_name: str = None, region: str = None) -> bool:
    """
    Deletes a file in S3.
    :param file_path: File path in S3
    :param bucket_name: Bucket name to delete from (defaults to the configured bucket)
    :param region: The AWS region for the bucket (defaults to the configured region)
    :return: True if the delete request succeeded, False otherwise
    """
    s3_client, bucket = _get_s3_client_and_bucket(bucket_name, region)
    try:
        s3_client.delete_object(Bucket=bucket, Key=file_path)
        return True
    except Exception as e:
        # Include the cause, which the original log line dropped.
        log.error(f"Unable to delete file {file_path} in S3 bucket {bucket}: {e}")
        return False
199
200
def get_s3_file(file_path: str, bucket_name: str, region: str = None) -> str | None:
    """
    Downloads a file from S3 to the local filesystem.
    The object is written to a local path identical to its S3 key.
    NOTE(review): keys with directory prefixes need those local directories to
    exist — confirm callers only use writable destination paths.
    :param file_path: File path in S3 (also used as the local destination path)
    :param bucket_name: Bucket name to get from (a falsy value falls back to the configured bucket)
    :param region: The AWS region for the bucket (defaults to the configured region)
    :return: the local file path on success, None otherwise
    """
    s3_client, bucket = _get_s3_client_and_bucket(bucket_name, region)
    try:
        s3_client.download_file(bucket, file_path, file_path)
        return file_path
    except Exception as e:
        # Include the cause, which the original log line dropped.
        log.error(f"Unable to get file {file_path} in S3 bucket {bucket}: {e}")
        return None
216
217
def get_s3_file_pre_signed_url(file_path: str, bucket_name: str = None,
                               expires_in: int = 1800, operation: str = 'get_object',
                               region: str = None, content_type: str = None) -> str | None:
    """
    Generates a pre-signed URL for a file in S3.
    :param file_path: File path in S3
    :param bucket_name: Bucket name to get from (defaults to the configured bucket)
    :param expires_in: Number of seconds the URL is valid for (defaults to 30 minutes)
    :param operation: The type of operation to allow on the object (defaults to 'get_object')
    :param region: The AWS region for the bucket (defaults to the configured region)
    :param content_type: Content type to sign into the URL (for put_object). The PUT request must send this exact Content-Type header.
    :return: the pre-signed URL string, or None on failure
    """
    s3_client, bucket = _get_s3_client_and_bucket(bucket_name, region)
    s3_params = {'Bucket': bucket, 'Key': file_path}
    if operation == 'put_object':
        # A signed PUT pins the Content-Type: the uploader's header must match
        # the signed value, so default it from the file extension.
        s3_params['ContentType'] = content_type if content_type else get_file_content_type(file_path)
    try:
        return s3_client.generate_presigned_url(operation,
                                                Params=s3_params,
                                                ExpiresIn=expires_in)
    except Exception as e:
        # Include the cause, which the original log line dropped.
        log.error(f"Unable to create a pre-signed URL for {file_path} in S3 bucket {bucket}: {e}")
        return None
242
243
def generate_s3_get_presigned_url(file_path: str, bucket_name: str = None, expires_in: int = 1800, region: str = None):
    """
    Convenience wrapper: pre-signed URL for reading ('get_object') a file in S3.
    :param file_path: File path in S3
    :param bucket_name: Bucket name to get from (defaults to the configured bucket)
    :param expires_in: Number of seconds the URL is valid for (defaults to 30 minutes)
    :param region: The AWS region for the bucket (defaults to the configured region)
    :return:
    """
    return get_s3_file_pre_signed_url(file_path,
                                      bucket_name=bucket_name,
                                      expires_in=expires_in,
                                      operation='get_object',
                                      region=region)
254
255
def generate_s3_put_presigned_url(file_path: str, bucket_name: str = None, expires_in: int = 1800,
                                  region: str = None, content_type: str = None):
    """
    Convenience wrapper: pre-signed URL for uploading ('put_object') a file to S3.
    :param file_path: File path in S3
    :param bucket_name: Bucket name to get from (defaults to the configured bucket)
    :param expires_in: Number of seconds the URL is valid for (defaults to 30 minutes)
    :param region: The AWS region for the bucket (defaults to the configured region)
    :param content_type: Content type to sign into the URL. The PUT request must send this exact Content-Type header.
    :return:
    """
    return get_s3_file_pre_signed_url(file_path,
                                      bucket_name=bucket_name,
                                      expires_in=expires_in,
                                      operation='put_object',
                                      region=region,
                                      content_type=content_type)
269
270
def head_s3_object(file_path: str, bucket_name: str, region: str = None) -> dict | None:
    """
    Uses the boto3 head_object method to retrieve object metadata without downloading
    the object - can also be used to check that an object exists.
    :param file_path: File path in S3
    :param bucket_name: Bucket name to get from (a falsy value falls back to the configured bucket)
    :param region: The AWS region for the bucket (defaults to the configured region)
    :return: object metadata or None
    """
    s3_client, bucket = _get_s3_client_and_bucket(bucket_name, region)
    try:
        return s3_client.head_object(Bucket=bucket, Key=file_path)
    except (s3_client.exceptions.NoSuchKey, s3_client.exceptions.ClientError) as e:
        # Fixed missing space after {file_path}; also surface the cause.
        log.error(f"The object {file_path} does not exist in the bucket {bucket}: {e}")
        return None
285
286
def check_s3_object_exists(file_path: str, bucket_name: str, region: str = None) -> bool:
    """
    Checks whether an object exists in S3, via head_s3_object.
    :param file_path: File path in S3
    :param bucket_name: Bucket name to check in (a falsy value falls back to the configured bucket)
    :param region: The AWS region for the bucket (defaults to the configured region)
    :return: True if the object exists, False otherwise
    """
    # head_s3_object already catches client errors (logging them) and returns None,
    # so no extra try/except or S3 client is needed here. This also fixes a bug:
    # the `region` argument was previously not forwarded to head_s3_object.
    return head_s3_object(file_path, bucket_name, region) is not None
305
306
def get_file_content_type(file_name: str, data_url: str = None) -> str:
    """
    Returns the content type of the file, preferring the media type embedded in a
    data URL and falling back to the file name's extension.
    :param file_name: File name used for extension-based detection
    :param data_url: Optional data URL ("data:<mime>[;base64],<payload>")
    :return: a MIME type string
    """
    if data_url is not None and data_url.startswith("data:"):
        # "data:image/png;base64,AAAA" -> "image/png". Strip the payload (everything
        # after the first comma) BEFORE splitting on ";", so a payload containing ";"
        # can no longer leak into the media type (the previous parse only split on ";").
        media_type = data_url.split(",", 1)[0][len("data:"):].split(";")[0]
        if media_type:
            return media_type
    return _get_file_content_type(file_name)
317
318
319def _get_file_content_type(file_name: str) -> str:  # pragma: unit
320    lower_file_name = file_name.lower()
321    # Images
322    if lower_file_name.endswith(".jpg") or lower_file_name.endswith(".jpeg"):
323        return "image/jpeg"
324    if lower_file_name.endswith(".png"):
325        return "image/png"
326    if lower_file_name.endswith(".gif"):
327        return "image/gif"
328    if lower_file_name.endswith(".bmp"):
329        return "image/bmp"
330    if lower_file_name.endswith(".tiff") or lower_file_name.endswith(".tif"):
331        return "image/tiff"
332    if lower_file_name.endswith(".webp"):
333        return "image/webp"
334    if lower_file_name.endswith(".svg"):
335        return "image/svg+xml"
336
337    # Documents
338    if lower_file_name.endswith(".pdf"):
339        return "application/pdf"
340    if lower_file_name.endswith(".doc"):
341        return "application/msword"
342    if lower_file_name.endswith(".docx"):
343        return "application/vnd.openxmlformats-officedocument.wordprocessingml.document"
344    if lower_file_name.endswith(".xls"):
345        return "application/vnd.ms-excel"
346    if lower_file_name.endswith(".xlsx"):
347        return "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
348    if lower_file_name.endswith(".ppt"):
349        return "application/vnd.ms-powerpoint"
350    if lower_file_name.endswith(".pptx"):
351        return "application/vnd.openxmlformats-officedocument.presentationml.presentation"
352    if lower_file_name.endswith(".csv"):
353        return "text/csv"
354
355    # Video
356    if lower_file_name.endswith(".mp4"):
357        return "video/mp4"
358    if lower_file_name.endswith(".webm"):
359        return "video/webm"
360    if lower_file_name.endswith(".ogg"):
361        return "video/ogg"
362
363    # Audio
364    if lower_file_name.endswith(".mp3"):
365        return "audio/mpeg"
366    if lower_file_name.endswith(".wav"):
367        return "audio/wav"
368    if lower_file_name.endswith(".flac"):
369        return "audio/flac"
370
371    # Compressed
372    if lower_file_name.endswith(".zip"):
373        return "application/zip"
374    if lower_file_name.endswith(".tar"):
375        return "application/x-tar"
376    if lower_file_name.endswith(".gz"):
377        return "application/gzip"
378    if lower_file_name.endswith(".bz2"):
379        return "application/x-bzip2"
380    if lower_file_name.endswith(".xz"):
381        return "application/x-xz"
382    if lower_file_name.endswith(".rar"):
383        return "application/vnd.rar"
384    if lower_file_name.endswith(".7z"):
385        return "application/x-7z-compressed"
386
387    # Anything else
388    return "text/plain"
389
390
def _get_s3_client_and_bucket(bucket: str, region: str):  # pragma: unit
    """Resolves the S3 client and target bucket, falling back to configured defaults."""
    effective_region = region if region else _get_default_region()
    client = make_client('s3', region_name=effective_region)
    # Resolve the bucket after the client is built, as before (the default-bucket
    # lookup can raise when nothing is configured).
    effective_bucket = bucket if bucket else _get_default_bucket()
    return client, effective_bucket
396
397
def _get_default_region():  # pragma: unit
    """Returns the configured AWS region, or 'eu-west-1' when it is missing or empty."""
    config = get_config_params()
    try:
        configured_region = config['aws']['region']
    except KeyError:
        return 'eu-west-1'
    return configured_region if configured_region != "" else 'eu-west-1'
404
405
def _get_default_bucket():  # pragma: unit
    """Returns the configured storage bucket name.

    Raises an Exception when no default bucket is configured.
    """
    config = get_config_params()
    try:
        bucket_name = config["storage"]["bucket_name"]
    except KeyError:
        raise Exception("No bucket name provided and no default bucket configured.")
    return bucket_name
log = <MyLogger BEDROCK-s3 (INFO)>
def save_file( target_file_path: str, base64_string: str, content_type: str = None, bucket_name: str = None, region: str = None) -> bool:
124def save_file(target_file_path: str, base64_string: str, content_type: str = None, bucket_name: str = None,
125              region: str = None) -> bool:
126    """
127    Saves a file to the specified bucket.
128    :param target_file_path: File path to save to
129    :param base64_string: Base64 string of the file. May also accept a data URL.
130    :param content_type: Content type of the file (defaults to the detected content type)
131    :param bucket_name: Bucket name to save to (defaults to the configured bucket)
132    :param region: The AWS region for the bucket (defaults to the configured region)
133    :return:
134    """
135    _content_type = content_type if content_type is not None else get_file_content_type(target_file_path, base64_string)
136    _base64_string = base64_string.replace("data:", "").split(",")[1] if "data:" in base64_string else base64_string
137    return _save_file(target_file_path, _base64_string, _content_type, bucket_name, region)

Saves a file to the specified bucket.

Parameters
  • target_file_path: File path to save to
  • base64_string: Base64 string of the file. May also accept a data URL.
  • content_type: Content type of the file (defaults to the detected content type)
  • bucket_name: Bucket name to save to (defaults to the configured bucket)
  • region: The AWS region for the bucket (defaults to the configured region)
Returns
def upload_s3_file( source_file_path: str, bucket_name: str, s3_key: str, region: str = None) -> bool:
167def upload_s3_file(source_file_path: str, bucket_name: str, s3_key: str, region: str = None) -> bool:
168    """
169    Uploads a file to S3.
170    :param source_file_path: The source path for the file
171    :param bucket_name: The name of the bucket to save to
172    :param s3_key: The key to save the file as (i.e. path in the bucket)
173    :param region: The AWS region for the bucket (defaults to the configured region)
174    :return:
175    """
176    s3_client, bucket = _get_s3_client_and_bucket(bucket_name, region)
177    try:
178        s3_client.upload_file(source_file_path, bucket, s3_key)
179        return True
180    except Exception as e:
181        log.error(f"Unable to send file {source_file_path} to S3 bucket {bucket} with key {s3_key}")
182        return False

Uploads a file to S3.

Parameters
  • source_file_path: The source path for the file
  • bucket_name: The name of the bucket to save to
  • s3_key: The key to save the file as (i.e. path in the bucket)
  • region: The AWS region for the bucket (defaults to the configured region)
Returns
def delete_s3_file(file_path: str, bucket_name: str = None, region: str = None) -> bool:
185def delete_s3_file(file_path: str, bucket_name: str = None, region: str = None) -> bool:
186    """
187    Deletes a file in S3.
188    :param file_path: File path in S3
189    :param bucket_name: Bucket name to delete from (defaults to the configured bucket)
190    :param region: The AWS region for the bucket (defaults to the configured region)
191    :return:
192    """
193    s3_client, bucket = _get_s3_client_and_bucket(bucket_name, region)
194    try:
195        s3_client.delete_object(Bucket=bucket, Key=file_path)
196        return True
197    except Exception as e:
198        log.error(f"Unable to delete file {file_path} in S3 bucket {bucket}")
199        return False

Deletes a file in S3.

Parameters
  • file_path: File path in S3
  • bucket_name: Bucket name to delete from (defaults to the configured bucket)
  • region: The AWS region for the bucket (defaults to the configured region)
Returns
def get_s3_file(file_path: str, bucket_name: str, region: str = None) -> str:
202def get_s3_file(file_path: str, bucket_name: str, region: str = None) -> str or None:
203    """
204    Gets a file from S3.
205    :param file_path: File path in S3
206    :param bucket_name: Bucket name to get from (defaults to the configured bucket)
207    :param region: The AWS region for the bucket (defaults to the configured region)
208    :return:
209    """
210    s3_client, bucket = _get_s3_client_and_bucket(bucket_name, region)
211    try:
212        s3_client.download_file(bucket, file_path, file_path)
213        return file_path
214    except Exception as e:
215        log.error(f"Unable to get file {file_path} in S3 bucket {bucket}")
216        return None

Gets a file from S3.

Parameters
  • file_path: File path in S3
  • bucket_name: Bucket name to get from (defaults to the configured bucket)
  • region: The AWS region for the bucket (defaults to the configured region)
Returns
def get_s3_file_pre_signed_url( file_path: str, bucket_name: str = None, expires_in: int = 1800, operation: str = 'get_object', region: str = None, content_type: str = None) -> str:
219def get_s3_file_pre_signed_url(file_path: str, bucket_name: str = None,
220                               expires_in: int = 1800, operation: str = 'get_object',
221                               region: str = None, content_type: str = None) -> str or None:
222    """
223    Generates a pre-signed URL for a file in S3.
224    :param file_path: File path in S3
225    :param bucket_name: Bucket name to get from (defaults to the configured bucket)
226    :param expires_in: Number of seconds the URL is valid for (defaults to 30 minutes)
227    :param operation: The type of operation to allow on the object (defaults to 'get_object')
228    :param region: The AWS region for the bucket (defaults to the configured region)
229    :param content_type: Content type to sign into the URL (for put_object). The PUT request must send this exact Content-Type header.
230    :return:
231    """
232    s3_client, bucket = _get_s3_client_and_bucket(bucket_name, region)
233    s3_params = {'Bucket': bucket, 'Key': file_path}
234    if operation == 'put_object':
235        s3_params['ContentType'] = content_type if content_type else get_file_content_type(file_path)
236    try:
237        return s3_client.generate_presigned_url(operation,
238                                                Params=s3_params,
239                                                ExpiresIn=expires_in)
240    except Exception as e:
241        log.error(f"Unable to create a pre-signed URL for {file_path} in S3 bucket {bucket}")
242        return None

Generates a pre-signed URL for a file in S3.

Parameters
  • file_path: File path in S3
  • bucket_name: Bucket name to get from (defaults to the configured bucket)
  • expires_in: Number of seconds the URL is valid for (defaults to 30 minutes)
  • operation: The type of operation to allow on the object (defaults to 'get_object')
  • region: The AWS region for the bucket (defaults to the configured region)
  • content_type: Content type to sign into the URL (for put_object). The PUT request must send this exact Content-Type header.
Returns
def generate_s3_get_presigned_url( file_path: str, bucket_name: str = None, expires_in: int = 1800, region: str = None):
245def generate_s3_get_presigned_url(file_path: str, bucket_name: str = None, expires_in: int = 1800, region: str = None):
246    """
247    Generates a get pre-signed URL for a file in S3.
248    :param file_path: File path in S3
249    :param bucket_name: Bucket name to get from (defaults to the configured bucket)
250    :param expires_in: Number of seconds the URL is valid for (defaults to 30 minutes)
251    :param region: The AWS region for the bucket (defaults to the configured region)
252    :return:
253    """
254    return get_s3_file_pre_signed_url(file_path, bucket_name, expires_in, 'get_object', region=region)

Generates a get pre-signed URL for a file in S3.

Parameters
  • file_path: File path in S3
  • bucket_name: Bucket name to get from (defaults to the configured bucket)
  • expires_in: Number of seconds the URL is valid for (defaults to 30 minutes)
  • region: The AWS region for the bucket (defaults to the configured region)
Returns
def generate_s3_put_presigned_url( file_path: str, bucket_name: str = None, expires_in: int = 1800, region: str = None, content_type: str = None):
257def generate_s3_put_presigned_url(file_path: str, bucket_name: str = None, expires_in: int = 1800,
258                                  region: str = None, content_type: str = None):
259    """
260    Generates a put pre-signed URL for a file in S3.
261    :param file_path: File path in S3
262    :param bucket_name: Bucket name to get from (defaults to the configured bucket)
263    :param expires_in: Number of seconds the URL is valid for (defaults to 30 minutes)
264    :param region: The AWS region for the bucket (defaults to the configured region)
265    :param content_type: Content type to sign into the URL. The PUT request must send this exact Content-Type header.
266    :return:
267    """
268    return get_s3_file_pre_signed_url(file_path, bucket_name, expires_in, 'put_object', region=region,
269                                      content_type=content_type)

Generates a put pre-signed URL for a file in S3.

Parameters
  • file_path: File path in S3
  • bucket_name: Bucket name to get from (defaults to the configured bucket)
  • expires_in: Number of seconds the URL is valid for (defaults to 30 minutes)
  • region: The AWS region for the bucket (defaults to the configured region)
  • content_type: Content type to sign into the URL. The PUT request must send this exact Content-Type header.
Returns
def head_s3_object(file_path: str, bucket_name: str, region: str = None) -> dict | None:
272def head_s3_object(file_path: str, bucket_name: str, region: str = None) -> dict | None:
273    """
274    Uses boto3 head_object method to retrieve metadata without downloading the object - can also be used to check an object exists
275    :param file_path: File path in S3
276    :param bucket_name: Bucket name to get from (defaults to the configured bucket)
277    :param region: The AWS region for the bucket (defaults to the configured region)
278    :return: object metadata or None
279    """
280    s3_client, bucket = _get_s3_client_and_bucket(bucket_name, region)
281    try:
282        return s3_client.head_object(Bucket=bucket, Key=file_path)
283    except (s3_client.exceptions.NoSuchKey, s3_client.exceptions.ClientError) as e:
284        log.error(f"The object {file_path} does not exist in the bucket {bucket}")
285        return None

Uses boto3 head_object method to retrieve metadata without downloading the object - can also be used to check an object exists

Parameters
  • file_path: File path in S3
  • bucket_name: Bucket name to get from (defaults to the configured bucket)
  • region: The AWS region for the bucket (defaults to the configured region)
Returns

object metadata or None

def check_s3_object_exists(file_path: str, bucket_name: str, region: str = None) -> bool:
288def check_s3_object_exists(file_path: str, bucket_name: str, region: str = None) -> bool:
289    """
290    Uses head_s3_object method to check an object exists
291    :param file_path: File path in S3
292    :param bucket_name: Bucket name to get from (defaults to the configured bucket)
293    :param region: The AWS region for the bucket (defaults to the configured region)
294    :return: true or false
295    """
296    s3_client, bucket = _get_s3_client_and_bucket(bucket_name, region)
297    try:
298        response = head_s3_object(file_path, bucket_name)
299        if response is None:
300            return False
301        else:
302            return True
303    except (s3_client.exceptions.NoSuchKey, s3_client.exceptions.ClientError) as e:
304        log.error(f"The object {file_path} does not exist in the bucket {bucket}")
305        return False

Uses head_s3_object method to check an object exists

Parameters
  • file_path: File path in S3
  • bucket_name: Bucket name to get from (defaults to the configured bucket)
  • region: The AWS region for the bucket (defaults to the configured region)
Returns

true or false

def get_file_content_type(file_name: str, data_url: str = None) -> str:
308def get_file_content_type(file_name: str, data_url: str = None) -> str:
309    """
310    Returns the content type of the file based on its extension or the data url.
311    :param file_name:
312    :param data_url:
313    :return:
314    """
315    if data_url is not None and "data:" in data_url:
316        return data_url.replace("data:", "").split(";")[0]
317    return _get_file_content_type(file_name)

Returns the content type of the file based on its extension or the data url.

Parameters
  • file_name:
  • data_url:
Returns