Bases: CharField
Char serializer field for S3 files or keys or URLs or objects.
Convert a URL/path into a valid S3 key for the bucket.
https://www.saritasa-s3-tools.s3.localhost/some-folder/file.txt will
become some-folder/file.txt, which is the location of the file in the bucket (its key).
The same result applies when AWS_LOCATION is used.
https://www.saritasa-s3-tools.s3.localhost/locations/some-folder/file.txt
will become some-folder/file.txt.
Source code in saritasa_s3_tools/django/drf_fields.py
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
class S3UploadURLField(serializers.CharField):
    """Char serializer field for S3 `files or keys or URLs or objects`.

    Convert url/path to a valid s3 key for bucket.

    Example:
    -------
    https://www.saritasa-s3-tools.s3.localhost/some-folder/file.txt will
    become `some-folder/file.txt` which is location of file in bucket (key).
    Same result will apply if AWS_LOCATION is used.
    https://www.saritasa-s3-tools.s3.localhost/locations/some-folder/file.txt
    will become `some-folder/file.txt`.

    """

    def __init__(self, **kwargs) -> None:
        """Make custom initialization."""
        # `max_length` is intercepted before calling super() because the
        # field can render full presigned urls (with auth query) that easily
        # exceed the model-field limit (100 by default); advertising the
        # limit in generated OpenAPI specs would confuse spec validators.
        length_limit = kwargs.pop("max_length", None)
        self.storage: Storage = kwargs.pop("storage", default_storage)
        self.max_length_validator = None
        super().__init__(**kwargs)
        if length_limit is not None:
            # Enforce the limit manually (against the extracted key) so the
            # generated spec stays limit-free.
            self.max_length_validator = validators.MaxLengthValidator(
                limit_value=length_limit,
                message=lazy_format(
                    self.error_messages["max_length"],
                    max_length=length_limit,
                ),
            )
        # An unfailable validator (min length 0) whose only purpose is to
        # expose the `invalid` error code to spec generators.
        spec_validator = validators.MinLengthValidator(
            limit_value=0,
            message=self.error_messages["invalid"],
        )
        spec_validator.code = "invalid"
        self.validators.append(spec_validator)

    def to_internal_value(self, data: typing.Any) -> str:
        """Validate `data` and convert it to internal value.

        Cut domain from url to save it in file field.
        """
        if not isinstance(data, str):
            self.fail("invalid")  # pragma: no cover
        # Only the path component matters: scheme, host and port are dropped.
        key = urllib.parse.urlparse(url=data).path
        # For path-style urls the bucket name is the first path segment;
        # drop everything up to and including it.
        key = key.split(
            f"{self.storage.bucket_name}/",  # type: ignore
        )[-1].lstrip("/")
        # Decode percent-escapes (and `+` as space).
        key = urllib.parse.unquote_plus(key)
        # Strip the AWS_LOCATION prefix, leaving the bare object key.
        location_prefix = self.storage.location  # type: ignore
        if location_prefix and key.startswith(location_prefix):
            key = key.split(f"{location_prefix}/")[-1]
        if self.max_length_validator:
            self.max_length_validator(key)
        return key

    def to_representation(self, value: typing.Any) -> str | None:
        """Return full file url."""
        if not value:
            return None
        # A plain string is a key and is resolved through the storage;
        # file-like objects already expose a ready-made `.url`.
        is_key = isinstance(value, str)
        return self.storage.url(name=value) if is_key else value.url
|
Make custom initialization.
Source code in saritasa_s3_tools/django/drf_fields.py
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
def __init__(self, **kwargs) -> None:
    """Make custom initialization.

    ``max_length`` is intercepted here instead of being forwarded to the
    parent field: the field can render full urls with auth query params
    that easily exceed the model-field limit (100 by default), so
    advertising the limit in generated OpenAPI specs would confuse spec
    validators.
    """
    limit = kwargs.pop("max_length", None)
    self.storage: Storage = kwargs.pop("storage", default_storage)
    self.max_length_validator = None
    super().__init__(**kwargs)
    if limit is not None:
        # Enforce the limit manually so it never leaks into the spec.
        message = lazy_format(
            self.error_messages["max_length"],
            max_length=limit,
        )
        self.max_length_validator = validators.MaxLengthValidator(
            limit_value=limit,
            message=message,
        )
    # Unfailable validator (min length 0) whose only purpose is to expose
    # the `invalid` error code to spec generators.
    always_valid = validators.MinLengthValidator(
        limit_value=0,
        message=self.error_messages["invalid"],
    )
    always_valid.code = "invalid"
    self.validators.append(always_valid)
|
Validate data and convert it to the internal value.
Cut the domain from the URL so only the file key is saved in the file field.
Source code in saritasa_s3_tools/django/drf_fields.py
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
def to_internal_value(self, data: typing.Any) -> str:
    """Validate `data` and convert it to internal value.

    Cut domain from url to save it in file field.
    """
    if not isinstance(data, str):
        self.fail("invalid")  # pragma: no cover
    # Only the path component matters: scheme, host and port are dropped.
    key = urllib.parse.urlparse(url=data).path
    # For path-style urls the bucket name is the first path segment;
    # drop everything up to and including it.
    bucket_marker = f"{self.storage.bucket_name}/"  # type: ignore
    key = key.split(bucket_marker)[-1].lstrip("/")
    # Decode percent-escapes (and `+` as space).
    key = urllib.parse.unquote_plus(key)
    # Strip the AWS_LOCATION prefix, leaving the bare object key.
    prefix = self.storage.location  # type: ignore
    if prefix and key.startswith(prefix):
        key = key.split(f"{prefix}/")[-1]
    if self.max_length_validator:
        self.max_length_validator(key)
    return key
|
Return full file url.
Source code in saritasa_s3_tools/django/drf_fields.py
139
140
141
142
143
144
145 | def to_representation(self, value: typing.Any) -> str | None:
"""Return full file url."""
if not value:
return None
if isinstance(value, str):
return self.storage.url(name=value)
return value.url
|