How can I set up my Django view class to upload a file to a specific folder?

I am combining two Django apps, chunked_upload and filer, so that I can have a file system that uploads large files by chunking. filer is built on ajax_upload, which imposes a limit on file size.

I am close to finishing, but I still need to route the chunked_upload file into the folder I am currently in (at the moment every file is uploaded to unsorted uploads, regardless of which folder the URL points to).

Since the chunked_upload upload button is ajax/javascript based, I have to go through my Django urls to reach the view. Is there a way to convert the old ajax-based upload button from filer into the new django-based upload button from chunked_upload so that it references the folder_id? (Or, essentially, convert filer-ajax_upload into api_chunked_upload, as seen in my URLs.)
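
To make the question concrete: the non-root URL pattern (see urls.py at the bottom) already captures folder_id, and the template reverses it with folder_id=folder.id, so the value should arrive at the view as a URL keyword argument; what I cannot find is the place where the chunked-upload view is supposed to consume it. A toy sketch of the part I believe already works (the view name is made up, purely to show where the captured kwarg lands):

from django.http import JsonResponse
from django.views.generic import View


class FolderEchoView(View):
    """Illustration only: named groups captured in the URLconf arrive as kwargs."""

    def post(self, request, *args, **kwargs):
        folder_id = kwargs.get('folder_id')  # also on self.kwargs; None for the no_folder URL
        return JsonResponse({'folder_id': folder_id})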

chunked_upload.views.py (where the new upload view lives)

class ChunkedUploadView(ChunkedUploadBaseView):
    """
    Uploads large files in multiple chunks. Also, has the ability to resume
    if the upload is interrupted.
    """

    field_name = 'file'
    content_range_header = 'HTTP_CONTENT_RANGE'
    content_range_pattern = re.compile(
        r'^bytes (?P<start>\d+)-(?P<end>\d+)/(?P<total>\d+)$'
    )
    max_bytes = MAX_BYTES  # Max amount of data that can be uploaded
    # If `fail_if_no_header` is True, an exception will be raised if the
    # content-range header is not found. Default is False to match Jquery File
    # Upload behavior (doesn't send header if the file is smaller than chunk)
    fail_if_no_header = False

    def get_extra_attrs(self, request):
        """
        Extra attribute values to be passed to the new ChunkedUpload instance.
        Should return a dictionary-like object.
        """
        return {}

    def get_max_bytes(self, request):
        """
        Used to limit the max amount of data that can be uploaded. `None` means
        no limit.
        You can override this to have a custom `max_bytes`, e.g. based on
        logged user.
        """

        return self.max_bytes

    def create_chunked_upload(self, save=False, **attrs):
        """
        Creates new chunked upload instance. Called if no 'upload_id' is
        found in the POST data.
        """
        chunked_upload = self.model(**attrs)
        # file starts empty
        chunked_upload.file.save(name=chunked_upload.filename, content=ContentFile(''), save=save)
        return chunked_upload

    def is_valid_chunked_upload(self, chunked_upload):
        """
        Check if chunked upload has already expired or is already complete.
        """
        if chunked_upload.expired:
            raise ChunkedUploadError(status=http_status.HTTP_410_GONE,
                                     detail='Upload has expired')
        error_msg = 'Upload has already been marked as "%s"'
        if chunked_upload.status == COMPLETE:
            raise ChunkedUploadError(status=http_status.HTTP_400_BAD_REQUEST,
                                     detail=error_msg % 'complete')

    def get_response_data(self, chunked_upload, request):
        """
        Data for the response. Should return a dictionary-like object.
        """
        return {
            'upload_id': chunked_upload.upload_id,
        }

    def _post(self, request, *args, **kwargs):
        chunk = request.FILES.get(self.field_name)
        if chunk is None:
            raise ChunkedUploadError(status=http_status.HTTP_400_BAD_REQUEST,
                                     detail='No chunk file was submitted')
        self.validate(request)

        upload_id = request.POST.get('upload_id')
        if upload_id:
            chunked_upload = get_object_or_404(self.get_queryset(request),
                                               upload_id=upload_id)
            self.is_valid_chunked_upload(chunked_upload)
        else:
            attrs = {'filename': chunk.name}
            if hasattr(request, 'user') and is_authenticated(request.user):
                attrs['user'] = request.user
            attrs.update(self.get_extra_attrs(request))
            chunked_upload = self.create_chunked_upload(save=False, **attrs)

        content_range = request.META.get(self.content_range_header, '')
        match = self.content_range_pattern.match(content_range)
        if match:
            start = int(match.group('start'))
            end = int(match.group('end'))
            total = int(match.group('total'))
        elif self.fail_if_no_header:
            raise ChunkedUploadError(status=http_status.HTTP_400_BAD_REQUEST,
                                     detail='Error in request headers')
        else:
            # Use the whole size when HTTP_CONTENT_RANGE is not provided
            start = 0
            end = chunk.size - 1
            total = chunk.size

        chunk_size = end - start + 1
        max_bytes = self.get_max_bytes(request)

        if max_bytes is not None and total > max_bytes:
            raise ChunkedUploadError(
                status=http_status.HTTP_400_BAD_REQUEST,
                detail='Size of file exceeds the limit (%s bytes)' % max_bytes
            )
        if chunked_upload.offset != start:
            raise ChunkedUploadError(status=http_status.HTTP_400_BAD_REQUEST,
                                     detail='Offsets do not match',
                                     offset=chunked_upload.offset)
        if chunk.size != chunk_size:
            raise ChunkedUploadError(status=http_status.HTTP_400_BAD_REQUEST,
                                     detail="File size doesn't match headers")

        chunked_upload.append_chunk(chunk, chunk_size=chunk_size, save=False)

        self._save(chunked_upload)

        return Response(self.get_response_data(chunked_upload, request),
                        status=http_status.HTTP_200_OK)
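
For reference, the hook I am currently eyeing is get_extra_attrs above: its docstring says the returned dict is passed to the new ChunkedUpload instance. Something like the sketch below is what I have in mind for my MyChunkedUploadView, but it only works if MyChunkedUpload grows a nullable ForeignKey to filer's Folder, which it does not have yet; I am also assuming the base view is a regular Django class-based view, so the captured folder_id is available on self.kwargs:

from chunked_upload.views import ChunkedUploadView

from .models import MyChunkedUpload  # import path is illustrative


class MyChunkedUploadView(ChunkedUploadView):
    model = MyChunkedUpload
    field_name = 'the_file'  # matches <input name="the_file"> in the template

    def get_extra_attrs(self, request):
        # Stash the target folder on the new upload instance so the completion
        # step later knows where the finished file belongs.
        # Assumes MyChunkedUpload has a nullable ForeignKey named `folder`.
        folder_id = self.kwargs.get('folder_id')  # captured by the URLconf
        return {'folder_id': folder_id} if folder_id else {}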

filer.admin.clipboardadmin.py (where the old upload view, the ajax_upload script, lives)

def get_urls(self):
    return [
        url(r'^operations/upload/(?P<folder_id>[0-9]+)/$',
            ajax_upload,
            name='filer-ajax_upload'),
        url(r'^operations/upload/no_folder/$',
            ajax_upload,
            name='filer-ajax_upload'),
    ] + super(ClipboardAdmin, self).get_urls()

@csrf_exempt
def ajax_upload(request, folder_id=None):
    """
    Receives an upload from the uploader. Receives only one file at a time.
    """
    folder = None
    if folder_id:
        try:
            # Get folder
            folder = MyChunkedFolder.objects.get(pk=folder_id)
        except MyChunkedFolder.DoesNotExist:
            return JsonResponse({'error': NO_FOLDER_ERROR})

    # check permissions
    if folder and not folder.has_add_children_permission(request):
        return JsonResponse({'error': NO_PERMISSIONS_FOR_FOLDER})
    try:
        if len(request.FILES) == 1:
            # dont check if request is ajax or not, just grab the file
            upload, filename, is_raw = handle_request_files_upload(request)
        else:
            # else process the request as usual
            upload, filename, is_raw = handle_upload(request)
        # TODO: Deprecated/refactor
        # Get clipboard
        # clipboard = Clipboard.objects.get_or_create(user=request.user)[0]

        # find the file type
        for filer_class in filer_settings.FILER_FILE_MODELS:
            FileSubClass = load_model(filer_class)
            # TODO: What if there are more than one that qualify?
            if FileSubClass.matches_file_type(filename, upload, request):
                FileForm = modelform_factory(
                    model=FileSubClass,
                    fields=('original_filename', 'owner', 'file')
                )
                break
        uploadform = FileForm({'original_filename': filename,
                               'owner': request.user.pk},
                              {'file': upload})
        if uploadform.is_valid():
            file_obj = uploadform.save(commit=False)
            # Enforce the FILER_IS_PUBLIC_DEFAULT
            file_obj.is_public = filer_settings.FILER_IS_PUBLIC_DEFAULT
            file_obj.folder = folder
            file_obj.save()
            # TODO: Deprecated/refactor
            # clipboard_item = ClipboardItem(
            #     clipboard=clipboard, file=file_obj)
            # clipboard_item.save()

            # Try to generate thumbnails.
            if not file_obj.icons:
                # There is no point to continue, as we can't generate
                # thumbnails for this file. Usual reasons: bad format or
                # filename.
                file_obj.delete()
                # This would be logged in BaseImage._generate_thumbnails()
                # if FILER_ENABLE_LOGGING is on.
                return JsonResponse(
                    {'error': 'failed to generate icons for file'},
                    status=500,
                )
            thumbnail = None
            # Backwards compatibility: try to get specific icon size (32px)
            # first. Then try medium icon size (they are already sorted),
            # fallback to the first (smallest) configured icon.
            for size in (['32']
                        + filer_settings.FILER_ADMIN_ICON_SIZES[1::-1]):
                try:
                    thumbnail = file_obj.icons[size]
                    break
                except KeyError:
                    continue

            data = {
                'thumbnail': thumbnail,
                'alt_text': '',
                'label': str(file_obj),
                'file_id': file_obj.pk,
            }
            # prepare preview thumbnail
            if type(file_obj) == Image:
                thumbnail_180_options = {
                    'size': (180, 180),
                    'crop': True,
                    'upscale': True,
                }
                thumbnail_180 = file_obj.file.get_thumbnail(
                    thumbnail_180_options)
                data['thumbnail_180'] = thumbnail_180.url
                data['original_image'] = file_obj.url
            return JsonResponse(data)
        else:
            form_errors = '; '.join(['%s: %s' % (
                field,
                ', '.join(errors)) for field, errors in list(
                    uploadform.errors.items())
            ])
            raise UploadException(
                "AJAX request not valid: form invalid '%s'" % (
                    form_errors,))
    except UploadException as e:
        return JsonResponse({'error': str(e)}, status=500)
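
The line in the old view that actually matters for my problem is file_obj.folder = folder: filer resolves the folder from the URL and attaches it right before saving. In the chunked flow the equivalent place would presumably be the completion view behind api_chunked_upload_complete (its on_completion hook, if I read chunked_upload correctly). A rough sketch of how I imagine porting that over, assuming the folder was stashed on the upload instance as in the previous sketch; the view below is my assumption, not working code:

from chunked_upload.views import ChunkedUploadCompleteView

from .models import MyChunkedUpload  # import path is illustrative


class MyChunkedUploadCompleteView(ChunkedUploadCompleteView):
    model = MyChunkedUpload

    def on_completion(self, uploaded_file, request):
        # The javascript in directory_listing.html below posts the upload_id on
        # completion, so the finished upload (and the folder stashed on it)
        # can be looked up again here.
        chunked = self.model.objects.get(
            upload_id=request.POST.get('upload_id'))
        folder = chunked.folder  # assumed ForeignKey to filer's Folder
        if folder and not folder.has_add_children_permission(request):
            return  # same permission rule as ajax_upload
        # From here the form logic of ajax_upload would be reused to build the
        # filer File and set file_obj.folder = folder before saving it.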

directory_listing.html (the javascript that renders my button; I am trying to upload a file into a non-root folder, and getting it to use the folder_id from the non-root URL is what I am trying to accomplish; for now, of course, it ignores the folder_id)

{% block object-tools-items %}
    <div class="navigator-button-wrapper">
        {% if folder.can_have_subfolders and can_make_folder %}
            <a href="{% url 'admin:filer-directory_listing-make_root_folder' %}?parent_id={{ folder.id }}{% if is_popup %}&amp;_popup=1{% endif %}"
                title="{% trans 'Adds a new Folder' %}"
                class="navigator-button"
                onclick="return showAddAnotherPopup(this);">
                {% trans "New Folder" %}
            </a>
        {% endif %}

        {% if permissions.has_add_children_permission and not folder.is_root %}
                  {% csrf_token %}
                  <input id="chunked_upload" type="file" name="the_file">
                  <p id="progress"></p>
                  <div id="messages"></div>
                  <script type="text/javascript">
                    var md5 = "",
                        csrf = $("input[name='csrfmiddlewaretoken']")[0].value,
                        form_data = [{"name": "csrfmiddlewaretoken", "value": csrf}];
                    function calculate_md5(file, chunk_size) {
                      var slice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
                          chunks = Math.ceil(file.size / chunk_size),
                          current_chunk = 0,
                          spark = new SparkMD5.ArrayBuffer();
                      function onload(e) {
                        spark.append(e.target.result);  // append chunk
                        current_chunk++;
                        if (current_chunk < chunks) {
                          read_next_chunk();
                        } else {
                          md5 = spark.end();
                        }
                      };
                      function read_next_chunk() {
                        var reader = new FileReader();
                        reader.onload = onload;
                        var start = current_chunk * chunk_size,
                            end = Math.min(start + chunk_size, file.size);
                        reader.readAsArrayBuffer(slice.call(file, start, end));
                      };
                      read_next_chunk();
                    }
                    $("#chunked_upload").fileupload({
                      url: "{% url 'api_chunked_upload' folder_id=folder.id %}",
                      dataType: "json",
                      maxChunkSize: 100000, // Chunks of 100 kB
                      formData: form_data,
                      add: function(e, data) { // Called before starting upload
                        $("#messages").empty();
                        // If this is the second file you're uploading we need to remove the
                        // old upload_id and just keep the csrftoken (which is always first).
                        form_data.splice(1);
                        calculate_md5(data.files[0], 100000);  // Again, chunks of 100 kB
                        data.submit();
                      },
                      chunkdone: function (e, data) { // Called after uploading each chunk
                        if (form_data.length < 2) {
                          form_data.push(
                            {"name": "upload_id", "value": data.result.upload_id}
                          );
                        }
                        $("#messages").append($('<p>').text(JSON.stringify(data.result)));
                        var progress = parseInt(data.loaded / data.total * 100.0, 10);
                        $("#progress").text(Array(progress).join("=") + "> " + progress + "%");
                      },
                      done: function (e, data) { // Called when the file has completely uploaded
                        $.ajax({
                          type: "POST",
                          url: "{% url 'api_chunked_upload_complete' %}",
                          data: {
                            csrfmiddlewaretoken: csrf,
                            upload_id: data.result.upload_id,
                            md5: md5
                          },
                          dataType: "json",
                          success: function(data) {
                            $("#messages").append($('<p>').text(JSON.stringify(data)));
                          }
                        });
                      },
                    });
                  </script>
        {% elif folder.is_unsorted_uploads %}
                {% csrf_token %}
                <input id="chunked_upload" type="file" name="the_file">
                <p id="progress"></p>
                <div id="messages"></div>
                <script type="text/javascript">
                    var md5 = "",
                        csrf = $("input[name='csrfmiddlewaretoken']")[0].value,
                        form_data = [{ "name": "csrfmiddlewaretoken", "value": csrf }];
                    function calculate_md5(file, chunk_size) {
                        var slice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice,
                            chunks = Math.ceil(file.size / chunk_size),
                            current_chunk = 0,
                            spark = new SparkMD5.ArrayBuffer();
                        function onload(e) {
                            spark.append(e.target.result);  // append chunk
                            current_chunk++;
                            if (current_chunk < chunks) {
                                read_next_chunk();
                            } else {
                                md5 = spark.end();
                            }
                        };
                        function read_next_chunk() {
                            var reader = new FileReader();
                            reader.onload = onload;
                            var start = current_chunk * chunk_size,
                                end = Math.min(start + chunk_size, file.size);
                            reader.readAsArrayBuffer(slice.call(file, start, end));
                        };
                        read_next_chunk();
                    }
                    $("#chunked_upload").fileupload({
                        url: "{% url 'api_chunked_upload' %}",
                        dataType: "json",
                        maxChunkSize: 100000, // Chunks of 100 kB
                        formData: form_data,
                        add: function (e, data) { // Called before starting upload
                            $("#messages").empty();
                            // If this is the second file you're uploading we need to remove the
                            // old upload_id and just keep the csrftoken (which is always first).
                            form_data.splice(1);
                            calculate_md5(data.files[0], 100000);  // Again, chunks of 100 kB
                            data.submit();
                        },
                        chunkdone: function (e, data) { // Called after uploading each chunk
                            if (form_data.length < 2) {
                                form_data.push(
                                    { "name": "upload_id", "value": data.result.upload_id }
                                );
                            }
                            $("#messages").append($('<p>').text(JSON.stringify(data.result)));
                            var progress = parseInt(data.loaded / data.total * 100.0, 10);
                            $("#progress").text(Array(progress).join("") + "> " + progress + "%");
                        },
                        done: function (e, data) { // Called when the file has completely uploaded
                            $.ajax({
                                type: "POST",
                                url: "{% url 'api_chunked_upload_complete' %}",
                                data: {
                                    csrfmiddlewaretoken: csrf,
                                    upload_id: data.result.upload_id,
                                    md5: md5
                                },
                                dataType: "json",
                                success: function (data) {
                                    $("#messages").append($('<p>').text(JSON.stringify(data)));
                                }
                            });
                        },
                    });
                </script>
        {% endif %}

urls.py (where my new upload URLs live)

urlpatterns = [
    url(r'^operations/upload/(?P<folder_id>[0-9]+)/$', MyChunkedUploadView.as_view(), name='api_chunked_upload'),
    url(r'^operations/upload/no_folder/$', MyChunkedUploadView.as_view(), name='api_chunked_upload'),
]
...
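
One detail I noticed while wiring this up: both patterns reuse the name api_chunked_upload, so which URL reverse() (and the {% url %} tag in the template) produces depends only on whether folder_id is supplied. A quick sanity check, assuming these patterns are included at the project root (the actual prefix depends on where this urls.py is included):

from django.urls import reverse

reverse('api_chunked_upload', kwargs={'folder_id': 42})
# e.g. '/operations/upload/42/'

reverse('api_chunked_upload')
# e.g. '/operations/upload/no_folder/' (the pattern without a capture group)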