Motivation: Laravel 5 now has built-in support for cloud storage.

Problem: The flow.js library allows files to be uploaded in chunks, which makes it possible to upload large files, but by default its PHP server-side Flow\File class saves the final merged file to local storage.

We can extend this class so that it writes the merged file through the AWS S3 stream wrapper instead:

<?php namespace Bao\Flow;

use Flow\ConfigInterface;
use Flow\RequestInterface;
use Flow\FileOpenException;
use Flow\FileLockException;

use Aws\S3\S3Client;

use Exception;

class File extends \Flow\File {

    /**
     * @var \Flow\RequestInterface
     */
    private $request;

    /**
     * @var \Flow\ConfigInterface
     */
    private $config;

    /**
     * @var \Aws\S3\S3Client
     */
    private $s3Client;

    /**
     * @param ConfigInterface       $config
     * @param RequestInterface|null $request
     * @param S3Client|null         $s3Client when provided, save() writes through the s3:// stream wrapper
     */
    public function __construct(ConfigInterface $config, RequestInterface $request = null, S3Client $s3Client = null)
    {
        parent::__construct($config, $request);
        $this->config = $config;
        $this->request = $request;
        $this->s3Client = $s3Client;
    }

    /**
     * Merge all chunks to single file
     *
     * @param string $destination final file location
     *
     * @throws FileLockException
     * @throws FileOpenException
     * @throws \Exception
     *
     * @return bool indicates if file was saved
     */
    public function save($destination)
    {
        // An S3 client means the destination is an s3:// stream; no client
        // means we are writing to local storage and can use file locking.
        $toS3 = !empty($this->s3Client);
        $local = !$toS3;

        if ($toS3 && !starts_with($destination, 's3://'))
        {
            // @see http://docs.aws.amazon.com/aws-sdk-php/guide/latest/feature-s3-stream-wrapper.html
            throw new Exception('Destination must use the s3:// protocol: ' . $destination);
        }

        if ($toS3)
        {
            // Register the s3:// stream wrapper so fopen() below can write directly to the bucket.
            $this->s3Client->registerStreamWrapper();
        }

        $fh = fopen($destination, 'w');
        if (!$fh)
        {
            throw new FileOpenException('failed to open destination file: ' . $destination);
        }

        // S3 streams do not support flock(), so only lock local destination files.
        if ($local && !flock($fh, LOCK_EX | LOCK_NB, $blocked))
        {
            // @codeCoverageIgnoreStart
            if ($blocked)
            {
                // Concurrent request has requested a lock.
                // File is being processed at the moment.
                // Warning: lock is not checked in windows.
                return false;
            }
            // @codeCoverageIgnoreEnd

            throw new FileLockException('failed to lock file: ' . $destination);
        }

        $totalChunks = $this->request->getTotalChunks();

        try
        {
            $preProcessChunk = $this->config->getPreprocessCallback();

            for ($i = 1; $i <= $totalChunks; $i++)
            {
                $file = $this->getChunkPath($i);
                $chunk = fopen($file, "rb");

                if (!$chunk)
                {
                    throw new FileOpenException('failed to open chunk: ' . $file);
                }

                if ($preProcessChunk !== null)
                {
                    call_user_func($preProcessChunk, $chunk);
                }

                stream_copy_to_stream($chunk, $fh);
                fclose($chunk);
            }
        }
        catch (\Exception $e)
        {
            if ($local) flock($fh, LOCK_UN);

            fclose($fh);
            throw $e;
        }

        if ($this->config->getDeleteChunksOnSave())
        {
            $this->deleteChunks();
        }

        if ($local) flock($fh, LOCK_UN);

        fclose($fh);

        return true;
    }
}
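
For context, here is a minimal sketch of how this class might be wired up in a Laravel 5 controller action. It assumes AWS SDK for PHP v3 (credentials resolved from the environment); the region, bucket name, temp directory, and route handling are placeholders only, not part of the class above.

<?php namespace App\Http\Controllers;

use Aws\S3\S3Client;
use Bao\Flow\File;
use Flow\Config;
use Flow\Request;

class UploadController extends Controller {

    public function store()
    {
        // Placeholder region/bucket: adjust to your environment.
        $s3Client = new S3Client([
            'version' => 'latest',
            'region'  => 'us-east-1',
        ]);

        // Chunks are still written to local temporary storage before merging.
        $config = new Config();
        $config->setTempDir(storage_path('app/chunks'));

        $request = new Request();
        $file = new File($config, $request, $s3Client);

        if ($file->validateChunk())
        {
            $file->saveChunk();
        }
        else
        {
            return response('', 400);
        }

        // Once the last chunk arrives, merge straight into the bucket.
        if ($file->validateFile())
        {
            $file->save('s3://my-bucket/uploads/' . $request->getFileName());
        }

        return response('', 200);
    }
}

A real controller would also handle flow.js's GET chunk-test requests and sanitise the destination key; this sketch only covers the upload path.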
