Skip to content

DRF Fields

S3FileTypeConfigField

Bases: ChoiceField

Custom Choice field for s3 configs.

Represent config choice in api and convert this choice to proper S3FileTypeConfig instance.

Source code in saritasa_s3_tools/django/drf_fields.py
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
class S3FileTypeConfigField(serializers.ChoiceField):
    """Custom Choice field for s3 configs.

    Represent config choice in api and convert this choice to proper
    S3FileTypeConfig instance.

    """

    def __init__(self, **kwargs) -> None:
        """Init field with empty static choices.

        Real choices are computed lazily in `_get_choices` from
        `configs.S3FileTypeConfig.configs`, so none are passed here.

        """
        super().__init__(choices=(), **kwargs)

    def _get_choices(self) -> dict[str, str]:
        """Get choices from S3FileTypeConfig.

        Recomputed on access so configs registered after the field was
        instantiated are still offered as valid choices.

        """
        current_choices = tuple(
            (
                config_name,
                config_name,
            )
            for config_name in configs.S3FileTypeConfig.configs
        )
        # Fix: `super()._get_choices()` returns a mapping, so comparing it
        # directly against a tuple of pairs was always unequal and choices
        # were rebuilt on every single access. Compare like with like so the
        # (idempotent) rebuild only happens when configs actually changed.
        if dict(current_choices) != dict(super()._get_choices()):
            self._set_choices(current_choices)
        return super()._get_choices()

    def _set_choices(self, choices: tuple[tuple[str, str], ...]) -> None:
        """Update choices.

        Redefined to avoid recursion.

        """
        self.grouped_choices = fields.to_choices_dict(choices)
        self._choices = fields.flatten_choices_dict(
            self.grouped_choices,
        )

        # Map the string representation of choices to the underlying value.
        # Allows us to deal with eg. integer choices while supporting either
        # integer or string input, but still get the correct datatype out.
        self.choice_strings_to_values = {
            str(key): key for key in self._choices
        }

    choices = property(_get_choices, _set_choices)

    def to_internal_value(
        self,
        data: typing.Any,
    ) -> configs.S3FileTypeConfig | None:
        """Convert api data to S3FileTypeConfig.

        Raises validation error (via `self.fail`) for unknown config names.

        """
        try:
            return configs.S3FileTypeConfig.configs[str(data)]
        except KeyError:
            self.fail("invalid_choice", input=data)

to_internal_value(data)

Convert api data to S3FileTypeConfig.

Source code in saritasa_s3_tools/django/drf_fields.py
57
58
59
60
61
62
63
64
65
def to_internal_value(
    self,
    data: typing.Any,
) -> configs.S3FileTypeConfig | None:
    """Look up the `S3FileTypeConfig` registered under ``data``."""
    registry = configs.S3FileTypeConfig.configs
    try:
        config = registry[str(data)]
    except KeyError:
        # Unknown config name: raise the standard choice-field error.
        self.fail("invalid_choice", input=data)
    else:
        return config

S3UploadURLField

Bases: CharField

Char serializer field for S3 files or keys or URLs or objects.

Convert url/path to a valid s3 key for bucket.

Example:

https://www.saritasa-s3-tools.s3.localhost/some-folder/file.txt will become some-folder/file.txt which is location of file in bucket (key). Same result will apply if AWS_LOCATION is used. https://www.saritasa-s3-tools.s3.localhost/locations/some-folder/file.txt will become some-folder/file.txt.

Source code in saritasa_s3_tools/django/drf_fields.py
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
class S3UploadURLField(serializers.CharField):
    """Char serializer field for S3 `files or keys or URLs or objects`.

    Convert url/path to a valid s3 key for bucket.

    Example:
    -------
    https://www.saritasa-s3-tools.s3.localhost/some-folder/file.txt will
    become `some-folder/file.txt` which is location of file in bucket (key).
    Same result will apply if AWS_LOCATION is used.
    https://www.saritasa-s3-tools.s3.localhost/locations/some-folder/file.txt
    will become `some-folder/file.txt`.

    """

    def __init__(self, **kwargs) -> None:
        """Make custom initialization.

        Pops `storage` (defaults to `default_storage`) and `max_length`
        from kwargs; `max_length` is enforced manually in
        `to_internal_value` instead of being declared on the field.

        """
        # Remove explicit max_length limit in field
        # It causes issues with auto spec generation and validation.
        # Since it can return full urls with auth query, which can easily pass
        # a limit specified in model field(which is by default 100), it causes
        # confusion for openapi specs validators.
        max_length = kwargs.pop("max_length", None)
        self.storage: Storage = kwargs.pop("storage", default_storage)
        self.max_length_validator = None
        super().__init__(**kwargs)
        if max_length is not None:
            # Manually validate max length so that api specs would generate
            # without limit
            self.max_length_validator = validators.MaxLengthValidator(
                limit_value=max_length,
                message=lazy_format(
                    self.error_messages["max_length"],
                    max_length=max_length,
                ),
            )
        # Append this validator to enable invalid code for spec
        # NOTE: limit_value=0 can never fail, the validator exists only so
        # that the `invalid` error code is advertised in generated specs.
        validator_for_spec = validators.MinLengthValidator(
            limit_value=0,
            message=self.error_messages["invalid"],
        )
        validator_for_spec.code = "invalid"
        self.validators.append(validator_for_spec)

    def to_internal_value(self, data: typing.Any) -> str:
        """Validate `data` and convert it to internal value.

        Cut domain from url to save it in file field.

        """
        if not isinstance(data, str):
            self.fail("invalid")  # pragma: no cover
        # Crop server domain and port and get relative path to avatar
        file_url = urllib.parse.urlparse(url=data).path

        # Crop S3 bucket name
        file_url = file_url.split(
            f"{self.storage.bucket_name}/",  # type: ignore
        )[-1].lstrip("/")

        # Normalize url
        file_url = urllib.parse.unquote_plus(file_url)

        # Remove aws-location prefix to keep only file name as key
        aws_location = self.storage.location  # type: ignore
        if aws_location and file_url.startswith(aws_location):
            file_url = file_url.split(f"{aws_location}/")[-1]
        if self.max_length_validator:
            # Enforce the max_length popped in __init__ against the final key.
            self.max_length_validator(file_url)
        return file_url

    def to_representation(self, value: typing.Any) -> str | None:
        """Return full file url.

        Strings are treated as storage keys and resolved via the storage;
        anything else truthy is expected to expose a `.url` attribute.

        """
        if not value:
            return None
        if isinstance(value, str):
            return self.storage.url(name=value)
        return value.url

__init__(**kwargs)

Make custom initialization.

Source code in saritasa_s3_tools/django/drf_fields.py
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
def __init__(self, **kwargs) -> None:
    """Make custom initialization."""
    # Remove explicit max_length limit in field
    # It causes issues with auto spec generation and validation.
    # Since it can return full urls with auth query, which can easily pass
    # a limit specified in model field(which is by default 100), it causes
    # confusion for openapi specs validators.
    max_length = kwargs.pop("max_length", None)
    self.storage: Storage = kwargs.pop("storage", default_storage)
    self.max_length_validator = None
    super().__init__(**kwargs)
    if max_length is not None:
        # Manually validate max length so that api specs would generate
        # without limit
        self.max_length_validator = validators.MaxLengthValidator(
            limit_value=max_length,
            message=lazy_format(
                self.error_messages["max_length"],
                max_length=max_length,
            ),
        )
    # Append this validator to enable invalid code for spec
    validator_for_spec = validators.MinLengthValidator(
        limit_value=0,
        message=self.error_messages["invalid"],
    )
    validator_for_spec.code = "invalid"
    self.validators.append(validator_for_spec)

to_internal_value(data)

Validate data and convert it to internal value.

Cut domain from url to save it in file field.

Source code in saritasa_s3_tools/django/drf_fields.py
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
def to_internal_value(self, data: typing.Any) -> str:
    """Validate `data` and convert it to internal value.

    Cut domain from url to save it in file field.

    """
    if not isinstance(data, str):
        self.fail("invalid")  # pragma: no cover

    # Drop scheme, domain and port, keeping only the path component.
    key = urllib.parse.urlparse(url=data).path

    # Strip the bucket-name segment (and any leading slashes) if present.
    key = key.split(f"{self.storage.bucket_name}/")[-1].lstrip("/")  # type: ignore

    # Decode percent-escapes and plus signs.
    key = urllib.parse.unquote_plus(key)

    # Strip the aws-location prefix so only the file key remains.
    location = self.storage.location  # type: ignore
    if location and key.startswith(location):
        key = key.split(f"{location}/")[-1]

    if self.max_length_validator:
        self.max_length_validator(key)
    return key

to_representation(value)

Return full file url.

Source code in saritasa_s3_tools/django/drf_fields.py
139
140
141
142
143
144
145
def to_representation(self, value: typing.Any) -> str | None:
    """Return full file url."""
    if not value:
        return None
    if isinstance(value, str):
        return self.storage.url(name=value)
    return value.url