1
1
# -*- coding: utf-8 -*-
2
2
"""Define the custom static storage to surpress bad URL references."""
3
3
import os
4
+ from datetime import datetime
4
5
from os .path import basename
5
6
from secrets import token_hex
6
7
7
8
from django .conf import settings
9
+ from django .contrib .staticfiles .storage import HashedFilesMixin
8
10
from django .core .files .storage import get_storage_class
9
- from django .contrib .staticfiles .storage import ManifestFilesMixin , HashedFilesMixin
10
11
11
- from storages .backends .s3boto3 import S3Boto3Storage , SpooledTemporaryFile
12
+ from storages .backends .s3boto3 import S3ManifestStaticStorage , S3StaticStorage
12
13
13
14
14
class SilentFileStorage(S3ManifestStaticStorage, S3StaticStorage):
    """Define the static storage using S3 via boto3 with hashing.

    If Django cannot find a referenced url in an asset, it will silently pass.

    """

    # Key prefix inside the bucket under which collected static files live.
    location = settings.STATICFILES_LOCATION
    # Bucket and CDN domain are declared as class attributes so that
    # django-storages picks them up directly, instead of being injected
    # through __init__ kwargs as in the previous S3Boto3Storage-based version.
    bucket_name = settings.AWS_STORAGE_BUCKET_NAME
    custom_domain = settings.AWS_S3_CUSTOM_DOMAIN
22
25
23
26
def __init__ (self , * args , ** kwargs ):
24
- kwargs ['bucket' ] = settings .AWS_STORAGE_BUCKET_NAME
25
- kwargs ['custom_domain' ] = settings .AWS_S3_CUSTOM_DOMAIN
26
- # Init S3Boto3Storage and ManifestFilesMixin to send assets to S3
27
+ # Init S3StaticStorage and S3ManifestStaticStorage to send assets to S3
27
28
super (SilentFileStorage , self ).__init__ (* args , ** kwargs )
28
29
# Init CompressorFileStorage to save local copies for compressor
29
30
self .local_storage = get_storage_class ("compressor.storage.CompressorFileStorage" )()
30
31
# Init HashedFilesMixin to get filenames with hashes present
31
32
self .local_hashes = HashedFilesMixin ()
32
33
33
- def _save_content (self , obj , content , parameters ):
34
- """Create a clone of the content file to avoid premature closure.
35
-
36
- When this is passed to boto3 it wrongly closes the file upon upload
37
- where as the storage backend expects it to still be open.
38
-
39
- """
40
- # Seek our content back to the start
41
- content .seek (0 , os .SEEK_SET )
42
-
43
- # Create a temporary file that will write to disk after a specified size
44
- content_autoclose = SpooledTemporaryFile ()
45
-
46
- # Write our original content into our copy that will be closed by boto3
47
- content_autoclose .write (content .read ())
48
-
49
- # Upload the object which will auto close the content_autoclose instance
50
- super (SilentFileStorage , self )._save_content (obj , content_autoclose , parameters )
51
-
52
- # Cleanup if this is fixed upstream our duplicate should always close
53
- if not content_autoclose .closed :
54
- content_autoclose .close ()
55
-
56
34
def save (self , name , content ):
57
- # record the clean file content (pre gzip)
35
+ """Save both a local and a remote copy of the given file"""
36
+ # Record the clean file content (pre gzip)
58
37
file_content = content .file
59
38
# Save remote copy to S3
60
- super (SilentFileStorage , self ).save (name , content )
61
- # Only save .scss and .js files locally
62
- if ".scss" in name or ".js" in name :
39
+ super (SilentFileStorage , self ).save (name , content )
40
+ # Only save files that are part of the compress blocks locally
41
+ if ".scss" in name or ".js" in name or ".css" in name :
63
42
# restore the clean file_content
64
43
content .file = file_content
65
44
# Save a local copy for compressor
@@ -68,6 +47,19 @@ def save(self, name, content):
68
47
self .local_storage ._save (self .local_hashes .hashed_name (name , content ), content )
69
48
return name
70
49
50
+ def exists (self , name ):
51
+ """Check if the named file exists in S3 storage"""
52
+ # Check file exists on S3
53
+ exists = super (SilentFileStorage , self ).exists (name )
54
+ # This is a hack to get a status report during S3ManifestStaticStorage._postProcess
55
+ print ("INFO " + datetime .now ().strftime ("%Y-%m-%d %H:%M:%S" ) +
56
+ " - Checking for matching file hash on S3 - " + name + ": " + (
57
+ "Skipping based on matching file hashes" if exists else "Hashes did not match"
58
+ )
59
+ )
60
+ return exists
61
+
62
+
71
63
def url (self , name , force = True ):
72
64
"""Handle catching bad URLs and return the name if route is unavailable."""
73
65
try :
@@ -84,17 +76,13 @@ def _url(self, hashed_name_func, name, force=True, hashed_files=None):
84
76
return name
85
77
86
78
87
class MediaFileStorage(S3StaticStorage):
    """Define the media storage backend for user uploaded/stored files."""

    # Key prefix inside the media bucket.
    location = settings.MEDIAFILES_LOCATION
    # Declared as class attributes (not __init__ kwargs) so django-storages
    # reads them directly, mirroring SilentFileStorage above.
    bucket_name = settings.MEDIA_BUCKET
    custom_domain = settings.MEDIA_CUSTOM_DOMAIN

    def __init__(self, *args, **kwargs):
        """Initialize the S3-only media backend."""
        # Save media to S3 only (we dont need an additional local copy)
        super(MediaFileStorage, self).__init__(*args, **kwargs)
97
-
98
-
99
- def get_salted_path (instance , filename ):
100
- return f'assets/{ token_hex (16 )[:15 ]} /{ basename (filename )} '
0 commit comments