Merge remote-tracking branch 'origin/main'

This commit is contained in:
Alexander Karpov 2023-03-27 17:12:22 +03:00
commit 5164ccf8c1
29 changed files with 3089 additions and 41 deletions

View File

@ -0,0 +1 @@
__version__ = "1.0"

View File

@ -0,0 +1,12 @@
from django.contrib import admin
from .models import ChunkedUpload
class ChunkedUploadAdmin(admin.ModelAdmin):
list_display = ("upload_id", "filename", "status", "created_on")
search_fields = ("filename", "filename")
list_filter = ("status",)
admin.site.register(ChunkedUpload, ChunkedUploadAdmin)

View File

@ -0,0 +1,17 @@
from django.utils.translation import gettext as _
class http_status:
HTTP_200_OK = 200
HTTP_400_BAD_REQUEST = 400
HTTP_403_FORBIDDEN = 403
HTTP_410_GONE = 410
UPLOADING = 1
COMPLETE = 2
CHUNKED_UPLOAD_CHOICES = (
(UPLOADING, _("Uploading")),
(COMPLETE, _("Complete")),
)

View File

@ -0,0 +1,13 @@
"""
Exceptions raised by django-chunked-upload.
"""
class ChunkedUploadError(Exception):
"""
Exception raised when an error occurs in the request/upload process.
"""
def __init__(self, status, **data):
self.status_code = status
self.data = data

View File

@ -0,0 +1,52 @@
from django.core.management.base import BaseCommand
from django.utils import timezone
from django.utils.translation import gettext as _
from akarpov.contrib.chunked_upload.constants import COMPLETE, UPLOADING
from akarpov.contrib.chunked_upload.models import ChunkedUpload
from akarpov.contrib.chunked_upload.settings import EXPIRATION_DELTA
prompt_msg = _("Do you want to delete {obj}?")
class Command(BaseCommand):
# Has to be a ChunkedUpload subclass
model = ChunkedUpload
help = "Deletes chunked uploads that have already expired."
def add_arguments(self, parser):
parser.add_argument(
"--interactive",
action="store_true",
dest="interactive",
default=False,
help="Prompt confirmation before each deletion.",
)
def handle(self, *args, **options):
interactive = options.get("interactive")
count = {UPLOADING: 0, COMPLETE: 0}
qs = self.model.objects.all()
qs = qs.filter(created_on__lt=(timezone.now() - EXPIRATION_DELTA))
for chunked_upload in qs:
if interactive:
prompt = prompt_msg.format(obj=chunked_upload) + " (y/n): "
answer = input(prompt).lower()
while answer not in ("y", "n"):
answer = input(prompt).lower()
if answer == "n":
continue
count[chunked_upload.status] += 1
# Deleting objects individually to call delete method explicitly
chunked_upload.delete()
print("%i complete uploads were deleted." % count[COMPLETE])
print("%i incomplete uploads were deleted." % count[UPLOADING])

View File

@ -0,0 +1,72 @@
# Generated by Django 4.1.7 on 2023-03-27 11:18
import akarpov.contrib.chunked_upload.models
import akarpov.utils.files
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="ChunkedUpload",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"upload_id",
models.CharField(
default=akarpov.contrib.chunked_upload.models.generate_upload_id,
editable=False,
max_length=32,
unique=True,
),
),
(
"file",
models.FileField(
max_length=255,
upload_to=akarpov.utils.files.user_file_upload_mixin,
),
),
("filename", models.CharField(max_length=255)),
("offset", models.BigIntegerField(default=0)),
("created_on", models.DateTimeField(auto_now_add=True)),
(
"status",
models.PositiveSmallIntegerField(
choices=[(1, "Uploading"), (2, "Complete")], default=1
),
),
("completed_on", models.DateTimeField(blank=True, null=True)),
(
"user",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="chunked_uploads",
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"abstract": False,
},
),
]

View File

@ -0,0 +1,107 @@
import hashlib
import uuid
from django.conf import settings
from django.core.files.uploadedfile import UploadedFile
from django.db import models
from django.utils import timezone
from .constants import CHUNKED_UPLOAD_CHOICES, UPLOADING
from .settings import (
DEFAULT_MODEL_USER_FIELD_BLANK,
DEFAULT_MODEL_USER_FIELD_NULL,
EXPIRATION_DELTA,
STORAGE,
UPLOAD_TO,
)
def generate_upload_id():
return uuid.uuid4().hex
class AbstractChunkedUpload(models.Model):
"""
Base chunked upload model. This model is abstract (doesn't create a table
in the database).
Inherit from this model to implement your own.
"""
upload_id = models.CharField(
max_length=32, unique=True, editable=False, default=generate_upload_id
)
file = models.FileField(max_length=255, upload_to=UPLOAD_TO, storage=STORAGE)
filename = models.CharField(max_length=255)
offset = models.BigIntegerField(default=0)
created_on = models.DateTimeField(auto_now_add=True)
status = models.PositiveSmallIntegerField(
choices=CHUNKED_UPLOAD_CHOICES, default=UPLOADING
)
completed_on = models.DateTimeField(null=True, blank=True)
@property
def expires_on(self):
return self.created_on + EXPIRATION_DELTA
@property
def expired(self):
return self.expires_on <= timezone.now()
@property
def md5(self):
if getattr(self, "_md5", None) is None:
md5 = hashlib.md5()
for chunk in self.file.chunks():
md5.update(chunk)
self._md5 = md5.hexdigest()
return self._md5
def delete(self, delete_file=True, *args, **kwargs):
if self.file:
storage, path = self.file.storage, self.file.path
super().delete(*args, **kwargs)
if self.file and delete_file:
storage.delete(path)
def __str__(self):
return f"<{self.filename} - upload_id: {self.upload_id} - bytes: {self.offset} - status: {self.status}>"
def append_chunk(self, chunk, chunk_size=None, save=True):
self.file.close()
with open(self.file.path, mode="ab") as file_obj: # mode = append+binary
file_obj.write(
chunk.read()
) # We can use .read() safely because chunk is already in memory
if chunk_size is not None:
self.offset += chunk_size
elif hasattr(chunk, "size"):
self.offset += chunk.size
else:
self.offset = self.file.size
self._md5 = None # Clear cached md5
if save:
self.save()
self.file.close() # Flush
def get_uploaded_file(self):
self.file.close()
self.file.open(mode="rb") # mode = read+binary
return UploadedFile(file=self.file, name=self.filename, size=self.offset)
class Meta:
abstract = True
class ChunkedUpload(AbstractChunkedUpload):
"""
Default chunked upload model.
"""
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name="chunked_uploads",
null=DEFAULT_MODEL_USER_FIELD_NULL,
blank=DEFAULT_MODEL_USER_FIELD_BLANK,
)
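# A minimal sketch of building a concrete upload model on top of the abstract base,
# mirroring what this commit does with the File model further below. "VideoUpload"
# and "owner" are illustrative names and are not part of this commit.
class VideoUpload(AbstractChunkedUpload):
    owner = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.CASCADE,
        related_name="video_uploads",
    )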

View File

@ -0,0 +1,16 @@
from django.http import HttpResponse
from .settings import CONTENT_TYPE, ENCODER
class Response(HttpResponse):
""" """
def __init__(self, content, status=None, *args, **kwargs):
super().__init__(
content=ENCODER(content),
content_type=CONTENT_TYPE,
status=status,
*args,
**kwargs
)

View File

@ -0,0 +1,58 @@
import os.path
import time
from datetime import timedelta
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.utils.module_loading import import_string
from akarpov.utils.files import user_file_upload_mixin
# How long after creation the upload will expire
DEFAULT_EXPIRATION_DELTA = timedelta(days=1)
EXPIRATION_DELTA = getattr(
settings, "CHUNKED_UPLOAD_EXPIRATION_DELTA", DEFAULT_EXPIRATION_DELTA
)
# Path where uploading files will be stored until completion
DEFAULT_UPLOAD_PATH = "chunked_uploads/%Y/%m/%d"
UPLOAD_PATH = getattr(settings, "CHUNKED_UPLOAD_PATH", DEFAULT_UPLOAD_PATH)
# upload_to function to be used in the FileField
def default_upload_to(instance, filename):
filename = os.path.join(UPLOAD_PATH, instance.upload_id + ".part")
return time.strftime(filename)
UPLOAD_TO = user_file_upload_mixin
# Storage system
try:
STORAGE = getattr(settings, "CHUNKED_UPLOAD_STORAGE_CLASS", lambda: None)()
except TypeError:
STORAGE = import_string(
getattr(settings, "CHUNKED_UPLOAD_STORAGE_CLASS", lambda: None)
)()
# Function used to encode response data. Receives a dict and returns a string
DEFAULT_ENCODER = DjangoJSONEncoder().encode
ENCODER = getattr(settings, "CHUNKED_UPLOAD_ENCODER", DEFAULT_ENCODER)
# Content-Type for the response data
DEFAULT_CONTENT_TYPE = "application/json"
CONTENT_TYPE = getattr(settings, "CHUNKED_UPLOAD_CONTENT_TYPE", DEFAULT_CONTENT_TYPE)
# Max amount of data (in bytes) that can be uploaded. `None` means no limit
DEFAULT_MAX_BYTES = None
MAX_BYTES = getattr(settings, "CHUNKED_UPLOAD_MAX_BYTES", DEFAULT_MAX_BYTES)
# determine the "null" and "blank" properties of "user" field in the "ChunkedUpload" model
DEFAULT_MODEL_USER_FIELD_NULL = getattr(
settings, "CHUNKED_UPLOAD_MODEL_USER_FIELD_NULL", True
)
DEFAULT_MODEL_USER_FIELD_BLANK = getattr(
settings, "CHUNKED_UPLOAD_MODEL_USER_FIELD_BLANK", True
)

View File

@ -0,0 +1,2 @@
# Tests for chunked_upload should be created in the app where it is used,
# with its own views and models.

View File

@ -0,0 +1,319 @@
import re
from django.core.files.base import ContentFile
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.views.generic import View
from .constants import COMPLETE, http_status
from .exceptions import ChunkedUploadError
from .models import ChunkedUpload
from .response import Response
from .settings import MAX_BYTES
def is_authenticated(user):
if callable(user.is_authenticated):
return user.is_authenticated() # Django <2.0
return user.is_authenticated # Django >=2.0
class ChunkedUploadBaseView(View):
"""
Base view for the rest of chunked upload views.
"""
# Has to be a ChunkedUpload subclass
model = ChunkedUpload
user_field_name = "user" # the field name that point towards the AUTH_USER in ChunkedUpload class or its subclasses
def get_queryset(self, request):
"""
Get (and filter) ChunkedUpload queryset.
By default, users can only continue uploading their own uploads.
"""
queryset = self.model.objects.all()
if (
hasattr(self.model, self.user_field_name)
and hasattr(request, "user")
and is_authenticated(request.user)
):
queryset = queryset.filter(**{self.user_field_name: request.user})
return queryset
def validate(self, request):
"""
Placeholder method to define extra validation.
Must raise ChunkedUploadError if validation fails.
"""
def get_response_data(self, chunked_upload, request):
"""
Data for the response. Should return a dictionary-like object.
Called *only* if POST is successful.
"""
return {}
def pre_save(self, chunked_upload, request, new=False):
"""
Placeholder method for calling before saving an object.
May be used to set attributes on the object that are implicit
in either the request, or the url.
"""
def save(self, chunked_upload, request, new=False):
"""
Method that calls save(). Overriding may be useful if save() needs
special args or kwargs.
"""
chunked_upload.save()
def post_save(self, chunked_upload, request, new=False):
"""
Placeholder method for calling after saving an object.
"""
def _save(self, chunked_upload):
"""
Wraps save() method.
"""
new = chunked_upload.id is None
self.pre_save(chunked_upload, self.request, new=new)
self.save(chunked_upload, self.request, new=new)
self.post_save(chunked_upload, self.request, new=new)
def check_permissions(self, request):
"""
Grants permission to start/continue an upload based on the request.
"""
if hasattr(request, "user") and not is_authenticated(request.user):
raise ChunkedUploadError(
status=http_status.HTTP_403_FORBIDDEN,
detail="Authentication credentials were not provided",
)
def _post(self, request, *args, **kwargs):
raise NotImplementedError
def post(self, request, *args, **kwargs):
"""
Handle POST requests.
"""
try:
self.check_permissions(request)
return self._post(request, *args, **kwargs)
except ChunkedUploadError as error:
return Response(error.data, status=error.status_code)
class ChunkedUploadView(ChunkedUploadBaseView):
"""
Uploads large files in multiple chunks. Also has the ability to resume
if the upload is interrupted.
"""
field_name = "file"
content_range_header = "HTTP_CONTENT_RANGE"
content_range_pattern = re.compile(
r"^bytes (?P<start>\d+)-(?P<end>\d+)/(?P<total>\d+)$"
)
max_bytes = MAX_BYTES # Max amount of data that can be uploaded
# If `fail_if_no_header` is True, an exception will be raised if the
# Content-Range header is not found. Default is False to match jQuery File
# Upload behavior (it doesn't send the header if the file is smaller than the chunk size).
fail_if_no_header = False
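# Example of the header the pattern above matches (hypothetical values):
#   Content-Range: bytes 0-99999/1048576  ->  start=0, end=99999, total=1048576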
def get_extra_attrs(self, request):
"""
Extra attribute values to be passed to the new ChunkedUpload instance.
Should return a dictionary-like object.
"""
attrs = {}
if (
hasattr(self.model, self.user_field_name)
and hasattr(request, "user")
and is_authenticated(request.user)
):
attrs[self.user_field_name] = request.user
return attrs
def get_max_bytes(self, request):
"""
Used to limit the max amount of data that can be uploaded. `None` means
no limit.
You can override this to have a custom `max_bytes`, e.g. based on
logged user.
"""
return self.max_bytes
def create_chunked_upload(self, save=False, **attrs):
"""
Creates a new chunked upload instance. Called if no 'upload_id' is
found in the POST data.
"""
chunked_upload = self.model(**attrs)
# file starts empty
chunked_upload.file.save(name="", content=ContentFile(""), save=save)
return chunked_upload
def is_valid_chunked_upload(self, chunked_upload):
"""
Check if chunked upload has already expired or is already complete.
"""
if chunked_upload.expired:
raise ChunkedUploadError(
status=http_status.HTTP_410_GONE, detail="Upload has expired"
)
error_msg = 'Upload has already been marked as "%s"'
if chunked_upload.status == COMPLETE:
raise ChunkedUploadError(
status=http_status.HTTP_400_BAD_REQUEST, detail=error_msg % "complete"
)
def get_response_data(self, chunked_upload, request):
"""
Data for the response. Should return a dictionary-like object.
"""
return {
"upload_id": chunked_upload.upload_id,
"offset": chunked_upload.offset,
"expires": chunked_upload.expires_on,
}
def _post(self, request, *args, **kwargs):
chunk = request.FILES.get(self.field_name)
if chunk is None:
raise ChunkedUploadError(
status=http_status.HTTP_400_BAD_REQUEST,
detail="No chunk file was submitted",
)
self.validate(request)
upload_id = request.POST.get("upload_id")
if upload_id:
chunked_upload = get_object_or_404(
self.get_queryset(request), upload_id=upload_id
)
self.is_valid_chunked_upload(chunked_upload)
else:
attrs = {"filename": chunk.name}
attrs.update(self.get_extra_attrs(request))
chunked_upload = self.create_chunked_upload(save=False, **attrs)
content_range = request.META.get(self.content_range_header, "")
match = self.content_range_pattern.match(content_range)
if match:
start = int(match.group("start"))
end = int(match.group("end"))
total = int(match.group("total"))
elif self.fail_if_no_header:
raise ChunkedUploadError(
status=http_status.HTTP_400_BAD_REQUEST,
detail="Error in request headers",
)
else:
# Use the whole size when HTTP_CONTENT_RANGE is not provided
start = 0
end = chunk.size - 1
total = chunk.size
chunk_size = end - start + 1
max_bytes = self.get_max_bytes(request)
if max_bytes is not None and total > max_bytes:
raise ChunkedUploadError(
status=http_status.HTTP_400_BAD_REQUEST,
detail="Size of file exceeds the limit (%s bytes)" % max_bytes,
)
if chunked_upload.offset != start:
raise ChunkedUploadError(
status=http_status.HTTP_400_BAD_REQUEST,
detail="Offsets do not match",
offset=chunked_upload.offset,
)
if chunk.size != chunk_size:
raise ChunkedUploadError(
status=http_status.HTTP_400_BAD_REQUEST,
detail="File size doesn't match headers",
)
chunked_upload.append_chunk(chunk, chunk_size=chunk_size, save=False)
self._save(chunked_upload)
return Response(
self.get_response_data(chunked_upload, request),
status=http_status.HTTP_200_OK,
)
class ChunkedUploadCompleteView(ChunkedUploadBaseView):
"""
Completes a chunked upload. Method `on_completion` is a placeholder to
define what to do when upload is complete.
"""
# I wouldn't recommend turning off the md5 check unless it is really
# impacting your performance. Proceed at your own risk.
do_md5_check = True
def on_completion(self, uploaded_file, request):
"""
Placeholder method to define what to do when upload is complete.
"""
def is_valid_chunked_upload(self, chunked_upload):
"""
Check if chunked upload is already complete.
"""
if chunked_upload.status == COMPLETE:
error_msg = "Upload has already been marked as complete"
raise ChunkedUploadError(
status=http_status.HTTP_400_BAD_REQUEST, detail=error_msg
)
def md5_check(self, chunked_upload, md5):
"""
Verify if md5 checksum sent by client matches generated md5.
"""
if chunked_upload.md5 != md5:
raise ChunkedUploadError(
status=http_status.HTTP_400_BAD_REQUEST,
detail="md5 checksum does not match",
)
def _post(self, request, *args, **kwargs):
upload_id = request.POST.get("upload_id")
md5 = request.POST.get("md5")
error_msg = None
if self.do_md5_check:
if not upload_id or not md5:
error_msg = "Both 'upload_id' and 'md5' are required"
elif not upload_id:
error_msg = "'upload_id' is required"
if error_msg:
raise ChunkedUploadError(
status=http_status.HTTP_400_BAD_REQUEST, detail=error_msg
)
chunked_upload = get_object_or_404(
self.get_queryset(request), upload_id=upload_id
)
self.validate(request)
self.is_valid_chunked_upload(chunked_upload)
if self.do_md5_check:
self.md5_check(chunked_upload, md5)
chunked_upload.status = COMPLETE
chunked_upload.completed_on = timezone.now()
self._save(chunked_upload)
self.on_completion(chunked_upload.get_uploaded_file(), request)
return Response(
self.get_response_data(chunked_upload, request),
status=http_status.HTTP_200_OK,
)

View File

@ -1,6 +0,0 @@
from django.urls import path
from drf_chunked_upload.views import ChunkedUploadView
urlpatterns = [
path("upload/", ChunkedUploadView.as_view(), name="chunked_upload"),
]

View File

@ -10,11 +10,12 @@
from django.urls import reverse
from model_utils.models import TimeStampedModel
from akarpov.contrib.chunked_upload.models import AbstractChunkedUpload
from akarpov.tools.shortener.models import ShortLink
from akarpov.utils.files import user_file_upload_mixin
class File(TimeStampedModel, ShortLink):
class File(AbstractChunkedUpload, TimeStampedModel, ShortLink):
"""model to store user's files"""
name = CharField(max_length=100)

View File

@ -1,9 +1,24 @@
from django.urls import path
from akarpov.files.views import files_view, folder_view
from akarpov.files.views import (
ChunkedUploadDemo,
MyChunkedUploadCompleteView,
MyChunkedUploadView,
files_view,
folder_view,
)
app_name = "files"
urlpatterns = [
path("upload", ChunkedUploadDemo.as_view(), name="chunked_upload"),
path(
"api/chunked_upload_complete/",
MyChunkedUploadCompleteView.as_view(),
name="api_chunked_upload_complete",
),
path(
"api/chunked_upload/", MyChunkedUploadView.as_view(), name="api_chunked_upload"
),
path("<str:slug>", files_view, name="view"),
path("f/<str:slug>", folder_view, name="folder"),
]

View File

@ -1,5 +1,10 @@
from django.views.generic import DetailView
from django.views.generic.base import TemplateView
from akarpov.contrib.chunked_upload.views import (
ChunkedUploadCompleteView,
ChunkedUploadView,
)
from akarpov.files.models import File, Folder
@ -19,3 +24,40 @@ class FileFolderView(DetailView):
folder_view = FileFolderView.as_view()
class ChunkedUploadDemo(TemplateView):
template_name = "files/upload.html"
class MyChunkedUploadView(ChunkedUploadView):
model = File
field_name = "the_file"
def check_permissions(self, request):
# Allow non-authenticated users to make uploads
pass
class MyChunkedUploadCompleteView(ChunkedUploadCompleteView):
model = File
def check_permissions(self, request):
# Allow non-authenticated users to make uploads
pass
def on_completion(self, uploaded_file, request):
# Do something with the uploaded file. E.g.:
# * Store the uploaded file on another model:
# SomeModel.objects.create(user=request.user, file=uploaded_file)
# * Pass it as an argument to a function:
# function_that_process_file(uploaded_file)
pass
def get_response_data(self, chunked_upload, request):
return {
"message": (
"You successfully uploaded '%s' (%s bytes)!"
% (chunked_upload.filename, chunked_upload.offset)
)
}
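# A rough client-side sketch of the two-step protocol implemented by the views above,
# written with the third-party "requests" library. The host and URL prefix, the chunk
# size and the CSRF handling are assumptions; the "the_file" field name, the
# "upload_id"/"md5" POST parameters and the Content-Range header come from this commit.
# (The demo template also sends csrfmiddlewaretoken with every request; omitted here.)
import hashlib
import os

import requests

UPLOAD_URL = "https://example.com/files/api/chunked_upload/"  # assumed prefix
COMPLETE_URL = "https://example.com/files/api/chunked_upload_complete/"  # assumed prefix
CHUNK_SIZE = 100_000  # 100 kB, matching the demo template


def upload_file(path: str) -> None:
    total = os.path.getsize(path)
    md5, offset, upload_id = hashlib.md5(), 0, None
    with open(path, "rb") as fh:
        while chunk := fh.read(CHUNK_SIZE):
            md5.update(chunk)
            headers = {
                "Content-Range": f"bytes {offset}-{offset + len(chunk) - 1}/{total}"
            }
            data = {"upload_id": upload_id} if upload_id else {}
            response = requests.post(
                UPLOAD_URL,
                data=data,
                files={"the_file": (os.path.basename(path), chunk)},
                headers=headers,
            )
            response.raise_for_status()
            body = response.json()
            upload_id, offset = body["upload_id"], body["offset"]
    # Second step: ask the server to mark the upload complete and verify the md5.
    requests.post(
        COMPLETE_URL, data={"upload_id": upload_id, "md5": md5.hexdigest()}
    ).raise_for_status()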

View File

@ -0,0 +1,12 @@
html {
margin: 0 auto;
max-width: 1000px;
}
body {
color: #5A5A5A;
font-family: monospace;
font-size: 14px;
}
.title {
text-align: center;
}

1466
akarpov/static/js/jquery.fileupload.js vendored Normal file

File diff suppressed because it is too large

View File

@ -0,0 +1,217 @@
/*
* jQuery Iframe Transport Plugin 1.8.3
* https://github.com/blueimp/jQuery-File-Upload
*
* Copyright 2011, Sebastian Tschan
* https://blueimp.net
*
* Licensed under the MIT license:
* http://www.opensource.org/licenses/MIT
*/
/* global define, require, window, document */
(function (factory) {
'use strict';
if (typeof define === 'function' && define.amd) {
// Register as an anonymous AMD module:
define(['jquery'], factory);
} else if (typeof exports === 'object') {
// Node/CommonJS:
factory(require('jquery'));
} else {
// Browser globals:
factory(window.jQuery);
}
}(function ($) {
'use strict';
// Helper variable to create unique names for the transport iframes:
var counter = 0;
// The iframe transport accepts four additional options:
// options.fileInput: a jQuery collection of file input fields
// options.paramName: the parameter name for the file form data,
// overrides the name property of the file input field(s),
// can be a string or an array of strings.
// options.formData: an array of objects with name and value properties,
// equivalent to the return data of .serializeArray(), e.g.:
// [{name: 'a', value: 1}, {name: 'b', value: 2}]
// options.initialIframeSrc: the URL of the initial iframe src,
// by default set to "javascript:false;"
$.ajaxTransport('iframe', function (options) {
if (options.async) {
// javascript:false as initial iframe src
// prevents warning popups on HTTPS in IE6:
/*jshint scripturl: true */
var initialIframeSrc = options.initialIframeSrc || 'javascript:false;',
/*jshint scripturl: false */
form,
iframe,
addParamChar;
return {
send: function (_, completeCallback) {
form = $('<form style="display:none;"></form>');
form.attr('accept-charset', options.formAcceptCharset);
addParamChar = /\?/.test(options.url) ? '&' : '?';
// XDomainRequest only supports GET and POST:
if (options.type === 'DELETE') {
options.url = options.url + addParamChar + '_method=DELETE';
options.type = 'POST';
} else if (options.type === 'PUT') {
options.url = options.url + addParamChar + '_method=PUT';
options.type = 'POST';
} else if (options.type === 'PATCH') {
options.url = options.url + addParamChar + '_method=PATCH';
options.type = 'POST';
}
// IE versions below IE8 cannot set the name property of
// elements that have already been added to the DOM,
// so we set the name along with the iframe HTML markup:
counter += 1;
iframe = $(
'<iframe src="' + initialIframeSrc +
'" name="iframe-transport-' + counter + '"></iframe>'
).bind('load', function () {
var fileInputClones,
paramNames = $.isArray(options.paramName) ?
options.paramName : [options.paramName];
iframe
.unbind('load')
.bind('load', function () {
var response;
// Wrap in a try/catch block to catch exceptions thrown
// when trying to access cross-domain iframe contents:
try {
response = iframe.contents();
// Google Chrome and Firefox do not throw an
// exception when calling iframe.contents() on
// cross-domain requests, so we unify the response:
if (!response.length || !response[0].firstChild) {
throw new Error();
}
} catch (e) {
response = undefined;
}
// The complete callback returns the
// iframe content document as response object:
completeCallback(
200,
'success',
{'iframe': response}
);
// Fix for IE endless progress bar activity bug
// (happens on form submits to iframe targets):
$('<iframe src="' + initialIframeSrc + '"></iframe>')
.appendTo(form);
window.setTimeout(function () {
// Removing the form in a setTimeout call
// allows Chrome's developer tools to display
// the response result
form.remove();
}, 0);
});
form
.prop('target', iframe.prop('name'))
.prop('action', options.url)
.prop('method', options.type);
if (options.formData) {
$.each(options.formData, function (index, field) {
$('<input type="hidden"/>')
.prop('name', field.name)
.val(field.value)
.appendTo(form);
});
}
if (options.fileInput && options.fileInput.length &&
options.type === 'POST') {
fileInputClones = options.fileInput.clone();
// Insert a clone for each file input field:
options.fileInput.after(function (index) {
return fileInputClones[index];
});
if (options.paramName) {
options.fileInput.each(function (index) {
$(this).prop(
'name',
paramNames[index] || options.paramName
);
});
}
// Appending the file input fields to the hidden form
// removes them from their original location:
form
.append(options.fileInput)
.prop('enctype', 'multipart/form-data')
// enctype must be set as encoding for IE:
.prop('encoding', 'multipart/form-data');
// Remove the HTML5 form attribute from the input(s):
options.fileInput.removeAttr('form');
}
form.submit();
// Insert the file input fields at their original location
// by replacing the clones with the originals:
if (fileInputClones && fileInputClones.length) {
options.fileInput.each(function (index, input) {
var clone = $(fileInputClones[index]);
// Restore the original name and form properties:
$(input)
.prop('name', clone.prop('name'))
.attr('form', clone.attr('form'));
clone.replaceWith(input);
});
}
});
form.append(iframe).appendTo(document.body);
},
abort: function () {
if (iframe) {
// javascript:false as iframe src aborts the request
// and prevents warning popups on HTTPS in IE6.
// concat is used to avoid the "Script URL" JSLint error:
iframe
.unbind('load')
.prop('src', initialIframeSrc);
}
if (form) {
form.remove();
}
}
};
}
});
// The iframe transport returns the iframe content document as response.
// The following adds converters from iframe to text, json, html, xml
// and script.
// Please note that the Content-Type for JSON responses has to be text/plain
// or text/html, if the browser doesn't include application/json in the
// Accept header, else IE will show a download dialog.
// The Content-Type for XML responses on the other hand has to be always
// application/xml or text/xml, so IE properly parses the XML response.
// See also
// https://github.com/blueimp/jQuery-File-Upload/wiki/Setup#content-type-negotiation
$.ajaxSetup({
converters: {
'iframe text': function (iframe) {
return iframe && $(iframe[0].body).text();
},
'iframe json': function (iframe) {
return iframe && $.parseJSON($(iframe[0].body).text());
},
'iframe html': function (iframe) {
return iframe && $(iframe[0].body).html();
},
'iframe xml': function (iframe) {
var xmlDoc = iframe && iframe[0];
return xmlDoc && $.isXMLDoc(xmlDoc) ? xmlDoc :
$.parseXML((xmlDoc.XMLDocument && xmlDoc.XMLDocument.xml) ||
$(xmlDoc.body).html());
},
'iframe script': function (iframe) {
return iframe && $.globalEval($(iframe[0].body).text());
}
}
});
}));

4
akarpov/static/js/jquery.js vendored Normal file

File diff suppressed because one or more lines are too long

563
akarpov/static/js/jquery.ui.widget.js vendored Normal file
View File

@ -0,0 +1,563 @@
/*! jQuery UI - v1.11.1+CommonJS - 2014-09-17
* http://jqueryui.com
* Includes: widget.js
* Copyright 2014 jQuery Foundation and other contributors; Licensed MIT */
(function( factory ) {
if ( typeof define === "function" && define.amd ) {
// AMD. Register as an anonymous module.
define([ "jquery" ], factory );
} else if (typeof exports === "object") {
// Node/CommonJS:
factory(require("jquery"));
} else {
// Browser globals
factory( jQuery );
}
}(function( $ ) {
/*!
* jQuery UI Widget 1.11.1
* http://jqueryui.com
*
* Copyright 2014 jQuery Foundation and other contributors
* Released under the MIT license.
* http://jquery.org/license
*
* http://api.jqueryui.com/jQuery.widget/
*/
var widget_uuid = 0,
widget_slice = Array.prototype.slice;
$.cleanData = (function( orig ) {
return function( elems ) {
var events, elem, i;
for ( i = 0; (elem = elems[i]) != null; i++ ) {
try {
// Only trigger remove when necessary to save time
events = $._data( elem, "events" );
if ( events && events.remove ) {
$( elem ).triggerHandler( "remove" );
}
// http://bugs.jquery.com/ticket/8235
} catch( e ) {}
}
orig( elems );
};
})( $.cleanData );
$.widget = function( name, base, prototype ) {
var fullName, existingConstructor, constructor, basePrototype,
// proxiedPrototype allows the provided prototype to remain unmodified
// so that it can be used as a mixin for multiple widgets (#8876)
proxiedPrototype = {},
namespace = name.split( "." )[ 0 ];
name = name.split( "." )[ 1 ];
fullName = namespace + "-" + name;
if ( !prototype ) {
prototype = base;
base = $.Widget;
}
// create selector for plugin
$.expr[ ":" ][ fullName.toLowerCase() ] = function( elem ) {
return !!$.data( elem, fullName );
};
$[ namespace ] = $[ namespace ] || {};
existingConstructor = $[ namespace ][ name ];
constructor = $[ namespace ][ name ] = function( options, element ) {
// allow instantiation without "new" keyword
if ( !this._createWidget ) {
return new constructor( options, element );
}
// allow instantiation without initializing for simple inheritance
// must use "new" keyword (the code above always passes args)
if ( arguments.length ) {
this._createWidget( options, element );
}
};
// extend with the existing constructor to carry over any static properties
$.extend( constructor, existingConstructor, {
version: prototype.version,
// copy the object used to create the prototype in case we need to
// redefine the widget later
_proto: $.extend( {}, prototype ),
// track widgets that inherit from this widget in case this widget is
// redefined after a widget inherits from it
_childConstructors: []
});
basePrototype = new base();
// we need to make the options hash a property directly on the new instance
// otherwise we'll modify the options hash on the prototype that we're
// inheriting from
basePrototype.options = $.widget.extend( {}, basePrototype.options );
$.each( prototype, function( prop, value ) {
if ( !$.isFunction( value ) ) {
proxiedPrototype[ prop ] = value;
return;
}
proxiedPrototype[ prop ] = (function() {
var _super = function() {
return base.prototype[ prop ].apply( this, arguments );
},
_superApply = function( args ) {
return base.prototype[ prop ].apply( this, args );
};
return function() {
var __super = this._super,
__superApply = this._superApply,
returnValue;
this._super = _super;
this._superApply = _superApply;
returnValue = value.apply( this, arguments );
this._super = __super;
this._superApply = __superApply;
return returnValue;
};
})();
});
constructor.prototype = $.widget.extend( basePrototype, {
// TODO: remove support for widgetEventPrefix
// always use the name + a colon as the prefix, e.g., draggable:start
// don't prefix for widgets that aren't DOM-based
widgetEventPrefix: existingConstructor ? (basePrototype.widgetEventPrefix || name) : name
}, proxiedPrototype, {
constructor: constructor,
namespace: namespace,
widgetName: name,
widgetFullName: fullName
});
// If this widget is being redefined then we need to find all widgets that
// are inheriting from it and redefine all of them so that they inherit from
// the new version of this widget. We're essentially trying to replace one
// level in the prototype chain.
if ( existingConstructor ) {
$.each( existingConstructor._childConstructors, function( i, child ) {
var childPrototype = child.prototype;
// redefine the child widget using the same prototype that was
// originally used, but inherit from the new version of the base
$.widget( childPrototype.namespace + "." + childPrototype.widgetName, constructor, child._proto );
});
// remove the list of existing child constructors from the old constructor
// so the old child constructors can be garbage collected
delete existingConstructor._childConstructors;
} else {
base._childConstructors.push( constructor );
}
$.widget.bridge( name, constructor );
return constructor;
};
$.widget.extend = function( target ) {
var input = widget_slice.call( arguments, 1 ),
inputIndex = 0,
inputLength = input.length,
key,
value;
for ( ; inputIndex < inputLength; inputIndex++ ) {
for ( key in input[ inputIndex ] ) {
value = input[ inputIndex ][ key ];
if ( input[ inputIndex ].hasOwnProperty( key ) && value !== undefined ) {
// Clone objects
if ( $.isPlainObject( value ) ) {
target[ key ] = $.isPlainObject( target[ key ] ) ?
$.widget.extend( {}, target[ key ], value ) :
// Don't extend strings, arrays, etc. with objects
$.widget.extend( {}, value );
// Copy everything else by reference
} else {
target[ key ] = value;
}
}
}
}
return target;
};
$.widget.bridge = function( name, object ) {
var fullName = object.prototype.widgetFullName || name;
$.fn[ name ] = function( options ) {
var isMethodCall = typeof options === "string",
args = widget_slice.call( arguments, 1 ),
returnValue = this;
// allow multiple hashes to be passed on init
options = !isMethodCall && args.length ?
$.widget.extend.apply( null, [ options ].concat(args) ) :
options;
if ( isMethodCall ) {
this.each(function() {
var methodValue,
instance = $.data( this, fullName );
if ( options === "instance" ) {
returnValue = instance;
return false;
}
if ( !instance ) {
return $.error( "cannot call methods on " + name + " prior to initialization; " +
"attempted to call method '" + options + "'" );
}
if ( !$.isFunction( instance[options] ) || options.charAt( 0 ) === "_" ) {
return $.error( "no such method '" + options + "' for " + name + " widget instance" );
}
methodValue = instance[ options ].apply( instance, args );
if ( methodValue !== instance && methodValue !== undefined ) {
returnValue = methodValue && methodValue.jquery ?
returnValue.pushStack( methodValue.get() ) :
methodValue;
return false;
}
});
} else {
this.each(function() {
var instance = $.data( this, fullName );
if ( instance ) {
instance.option( options || {} );
if ( instance._init ) {
instance._init();
}
} else {
$.data( this, fullName, new object( options, this ) );
}
});
}
return returnValue;
};
};
$.Widget = function( /* options, element */ ) {};
$.Widget._childConstructors = [];
$.Widget.prototype = {
widgetName: "widget",
widgetEventPrefix: "",
defaultElement: "<div>",
options: {
disabled: false,
// callbacks
create: null
},
_createWidget: function( options, element ) {
element = $( element || this.defaultElement || this )[ 0 ];
this.element = $( element );
this.uuid = widget_uuid++;
this.eventNamespace = "." + this.widgetName + this.uuid;
this.options = $.widget.extend( {},
this.options,
this._getCreateOptions(),
options );
this.bindings = $();
this.hoverable = $();
this.focusable = $();
if ( element !== this ) {
$.data( element, this.widgetFullName, this );
this._on( true, this.element, {
remove: function( event ) {
if ( event.target === element ) {
this.destroy();
}
}
});
this.document = $( element.style ?
// element within the document
element.ownerDocument :
// element is window or document
element.document || element );
this.window = $( this.document[0].defaultView || this.document[0].parentWindow );
}
this._create();
this._trigger( "create", null, this._getCreateEventData() );
this._init();
},
_getCreateOptions: $.noop,
_getCreateEventData: $.noop,
_create: $.noop,
_init: $.noop,
destroy: function() {
this._destroy();
// we can probably remove the unbind calls in 2.0
// all event bindings should go through this._on()
this.element
.unbind( this.eventNamespace )
.removeData( this.widgetFullName )
// support: jquery <1.6.3
// http://bugs.jquery.com/ticket/9413
.removeData( $.camelCase( this.widgetFullName ) );
this.widget()
.unbind( this.eventNamespace )
.removeAttr( "aria-disabled" )
.removeClass(
this.widgetFullName + "-disabled " +
"ui-state-disabled" );
// clean up events and states
this.bindings.unbind( this.eventNamespace );
this.hoverable.removeClass( "ui-state-hover" );
this.focusable.removeClass( "ui-state-focus" );
},
_destroy: $.noop,
widget: function() {
return this.element;
},
option: function( key, value ) {
var options = key,
parts,
curOption,
i;
if ( arguments.length === 0 ) {
// don't return a reference to the internal hash
return $.widget.extend( {}, this.options );
}
if ( typeof key === "string" ) {
// handle nested keys, e.g., "foo.bar" => { foo: { bar: ___ } }
options = {};
parts = key.split( "." );
key = parts.shift();
if ( parts.length ) {
curOption = options[ key ] = $.widget.extend( {}, this.options[ key ] );
for ( i = 0; i < parts.length - 1; i++ ) {
curOption[ parts[ i ] ] = curOption[ parts[ i ] ] || {};
curOption = curOption[ parts[ i ] ];
}
key = parts.pop();
if ( arguments.length === 1 ) {
return curOption[ key ] === undefined ? null : curOption[ key ];
}
curOption[ key ] = value;
} else {
if ( arguments.length === 1 ) {
return this.options[ key ] === undefined ? null : this.options[ key ];
}
options[ key ] = value;
}
}
this._setOptions( options );
return this;
},
_setOptions: function( options ) {
var key;
for ( key in options ) {
this._setOption( key, options[ key ] );
}
return this;
},
_setOption: function( key, value ) {
this.options[ key ] = value;
if ( key === "disabled" ) {
this.widget()
.toggleClass( this.widgetFullName + "-disabled", !!value );
// If the widget is becoming disabled, then nothing is interactive
if ( value ) {
this.hoverable.removeClass( "ui-state-hover" );
this.focusable.removeClass( "ui-state-focus" );
}
}
return this;
},
enable: function() {
return this._setOptions({ disabled: false });
},
disable: function() {
return this._setOptions({ disabled: true });
},
_on: function( suppressDisabledCheck, element, handlers ) {
var delegateElement,
instance = this;
// no suppressDisabledCheck flag, shuffle arguments
if ( typeof suppressDisabledCheck !== "boolean" ) {
handlers = element;
element = suppressDisabledCheck;
suppressDisabledCheck = false;
}
// no element argument, shuffle and use this.element
if ( !handlers ) {
handlers = element;
element = this.element;
delegateElement = this.widget();
} else {
element = delegateElement = $( element );
this.bindings = this.bindings.add( element );
}
$.each( handlers, function( event, handler ) {
function handlerProxy() {
// allow widgets to customize the disabled handling
// - disabled as an array instead of boolean
// - disabled class as method for disabling individual parts
if ( !suppressDisabledCheck &&
( instance.options.disabled === true ||
$( this ).hasClass( "ui-state-disabled" ) ) ) {
return;
}
return ( typeof handler === "string" ? instance[ handler ] : handler )
.apply( instance, arguments );
}
// copy the guid so direct unbinding works
if ( typeof handler !== "string" ) {
handlerProxy.guid = handler.guid =
handler.guid || handlerProxy.guid || $.guid++;
}
var match = event.match( /^([\w:-]*)\s*(.*)$/ ),
eventName = match[1] + instance.eventNamespace,
selector = match[2];
if ( selector ) {
delegateElement.delegate( selector, eventName, handlerProxy );
} else {
element.bind( eventName, handlerProxy );
}
});
},
_off: function( element, eventName ) {
eventName = (eventName || "").split( " " ).join( this.eventNamespace + " " ) + this.eventNamespace;
element.unbind( eventName ).undelegate( eventName );
},
_delay: function( handler, delay ) {
function handlerProxy() {
return ( typeof handler === "string" ? instance[ handler ] : handler )
.apply( instance, arguments );
}
var instance = this;
return setTimeout( handlerProxy, delay || 0 );
},
_hoverable: function( element ) {
this.hoverable = this.hoverable.add( element );
this._on( element, {
mouseenter: function( event ) {
$( event.currentTarget ).addClass( "ui-state-hover" );
},
mouseleave: function( event ) {
$( event.currentTarget ).removeClass( "ui-state-hover" );
}
});
},
_focusable: function( element ) {
this.focusable = this.focusable.add( element );
this._on( element, {
focusin: function( event ) {
$( event.currentTarget ).addClass( "ui-state-focus" );
},
focusout: function( event ) {
$( event.currentTarget ).removeClass( "ui-state-focus" );
}
});
},
_trigger: function( type, event, data ) {
var prop, orig,
callback = this.options[ type ];
data = data || {};
event = $.Event( event );
event.type = ( type === this.widgetEventPrefix ?
type :
this.widgetEventPrefix + type ).toLowerCase();
// the original event may come from any element
// so we need to reset the target on the new event
event.target = this.element[ 0 ];
// copy original event properties over to the new event
orig = event.originalEvent;
if ( orig ) {
for ( prop in orig ) {
if ( !( prop in event ) ) {
event[ prop ] = orig[ prop ];
}
}
}
this.element.trigger( event, data );
return !( $.isFunction( callback ) &&
callback.apply( this.element[0], [ event ].concat( data ) ) === false ||
event.isDefaultPrevented() );
}
};
$.each( { show: "fadeIn", hide: "fadeOut" }, function( method, defaultEffect ) {
$.Widget.prototype[ "_" + method ] = function( element, options, callback ) {
if ( typeof options === "string" ) {
options = { effect: options };
}
var hasOptions,
effectName = !options ?
method :
options === true || typeof options === "number" ?
defaultEffect :
options.effect || defaultEffect;
options = options || {};
if ( typeof options === "number" ) {
options = { duration: options };
}
hasOptions = !$.isEmptyObject( options );
options.complete = callback;
if ( options.delay ) {
element.delay( options.delay );
}
if ( hasOptions && $.effects && $.effects.effect[ effectName ] ) {
element[ method ]( options );
} else if ( effectName !== method && element[ effectName ] ) {
element[ effectName ]( options.duration, options.easing, callback );
} else {
element.queue(function( next ) {
$( this )[ method ]();
if ( callback ) {
callback.call( element[ 0 ] );
}
next();
});
}
};
});
var widget = $.widget;
}));

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,95 @@
{% extends 'base.html' %}
{% load static %}
{% block javascript %}
<script src="{% static 'js/jquery.js' %}"></script>
<script src="{% static 'js/jquery.ui.widget.js' %}"></script>
<!-- The Iframe Transport is required for browsers without support for XHR file uploads -->
<script src="{% static 'js/jquery.iframe-transport.js' %}"></script>
<!-- The basic File Upload plugin -->
<script src="{% static 'js/jquery.fileupload.js' %}"></script>
<!-- Calculate md5 -->
<script src="{% static 'js/spark-md5.js' %}"></script>
{% endblock %}
{% block content %}
<h1 class="title">
django-chunked-upload
</h1>
{% csrf_token %}
<input id="chunked_upload" type="file" name="the_file">
<p id="progress"></p>
<div id="messages"></div>
{% endblock %}
{% block inline_javascript %}
<script type="text/javascript">
var md5 = "",
csrf = $("input[name='csrfmiddlewaretoken']")[0].value,
form_data = [{"name": "csrfmiddlewaretoken", "value": csrf}];
function calculate_md5(file, chunk_size) {
var slice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
chunks = Math.ceil(file.size / chunk_size),
current_chunk = 0,
spark = new SparkMD5.ArrayBuffer();
function onload(e) {
spark.append(e.target.result); // append chunk
current_chunk++;
if (current_chunk < chunks) {
read_next_chunk();
} else {
md5 = spark.end();
}
};
function read_next_chunk() {
var reader = new FileReader();
reader.onload = onload;
var start = current_chunk * chunk_size,
end = Math.min(start + chunk_size, file.size);
reader.readAsArrayBuffer(slice.call(file, start, end));
};
read_next_chunk();
}
$("#chunked_upload").fileupload({
url: "{% url 'files:api_chunked_upload' %}",
dataType: "json",
maxChunkSize: 100000, // Chunks of 100 kB
formData: form_data,
add: function(e, data) { // Called before starting upload
$("#messages").empty();
// If this is the second file you're uploading we need to remove the
// old upload_id and just keep the csrftoken (which is always first).
form_data.splice(1);
calculate_md5(data.files[0], 100000); // Again, chunks of 100 kB
data.submit();
},
chunkdone: function (e, data) { // Called after uploading each chunk
if (form_data.length < 2) {
form_data.push(
{"name": "upload_id", "value": data.result.upload_id}
);
}
$("#messages").append($('<p>').text(JSON.stringify(data.result)));
var progress = parseInt(data.loaded / data.total * 100.0, 10);
$("#progress").text(Array(progress).join("=") + "> " + progress + "%");
},
done: function (e, data) { // Called when the file has completely uploaded
$.ajax({
type: "POST",
url: "{% url 'files:api_chunked_upload_complete' %}",
data: {
csrfmiddlewaretoken: csrf,
upload_id: data.result.upload_id,
md5: md5
},
dataType: "json",
success: function(data) {
$("#messages").append($('<p>').text(JSON.stringify(data)));
}
});
},
});
</script>
{% endblock %}

View File

@ -19,7 +19,6 @@
"users/",
include("akarpov.users.api.urls"),
),
path("file/", include("akarpov.files.api.urls")),
path(
"tools/",
include([path("qr/", include("akarpov.tools.qr.api.urls"))]),

View File

@ -109,7 +109,7 @@
"polymorphic",
"cacheops",
"extra_settings",
"drf_chunked_upload",
"akarpov.contrib.chunked_upload",
"active_link",
# django-cms
"cms",

30
poetry.lock generated
View File

@ -973,18 +973,6 @@ django-timezone-field = ">=5.0"
python-crontab = ">=2.3.4"
tzdata = "*"
[[package]]
name = "django-chunked-upload"
version = "2.0.0"
description = "Upload large files to Django in multiple chunks, with the ability to resume if the upload is interrupted."
category = "main"
optional = false
python-versions = "*"
files = [
{file = "django-chunked-upload-2.0.0.tar.gz", hash = "sha256:fb8961029d6a9febb974fb24f529c7773c61a9e6467825db861aae29e1f0c978"},
{file = "django_chunked_upload-2.0.0-py3-none-any.whl", hash = "sha256:f81ce8d7a5df87c75c971b31dd15961712e729f8fbb38a9398c85cc01f89316d"},
]
[[package]]
name = "django-ckeditor"
version = "6.5.1"
@ -1496,22 +1484,6 @@ files = [
{file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"},
]
[[package]]
name = "drf-chunked-upload"
version = "0.5.1"
description = "Upload large files to Django REST Framework in multiple chunks, with the ability to resume if the upload is interrupted."
category = "main"
optional = false
python-versions = "*"
files = [
{file = "drf-chunked-upload-0.5.1.tar.gz", hash = "sha256:7c4faa401295cc24bd270cee41f7f0af5268f91ec477237c4e40bcb51adddfef"},
{file = "drf_chunked_upload-0.5.1-py3-none-any.whl", hash = "sha256:1a2802d024f21e3ba86158bd31bb8fc33af96c0b8c0b87a759201ca0318f832a"},
]
[package.dependencies]
Django = ">=2.2"
djangorestframework = ">=3.11"
[[package]]
name = "drf-spectacular"
version = "0.26.1"
@ -4065,4 +4037,4 @@ files = [
[metadata]
lock-version = "2.0"
python-versions = "^3.11"
content-hash = "8bb4acf1ceb7b54ad1864fe625ca1aa68d4cc371ce45170bede728ad84fa2c84"
content-hash = "fb7f1e1b7dc4819e5b86b247de8485a3be33db285283a39279c027ca073a7643"

View File

@ -66,8 +66,6 @@ psycopg2-binary = "^2.9.5"
django-cms = "^3.11.1"
django-sekizai = "^4.0.0"
amzqr = "^0.0.1"
django-chunked-upload = "^2.0.0"
drf-chunked-upload = "^0.5.1"
django-active-link = "^0.1.8"
channels = "^4.0.0"
django-upload-validator = "^1.1.6"