
Add a chunking storage engine for files

Summary:
Ref T7149. This isn't complete and isn't active yet, but does basically work. I'll shore it up in the next few diffs.

The new workflow goes like this:

> Client, file.allocate(): I'd like to upload a file with length L, metadata M, and hash H.

Then the server returns `upload` (a boolean) and `filePHID` (a PHID). These mean:

| upload | filePHID | means |
|---|---|---|
| false | false | Server can't accept file. |
| false | true | File data already known, file created from hash. |
| true | false | Just upload normally. |
| true | true | Query chunks to start or resume a chunked upload. |

All but the last case are uninteresting and work like existing uploads with `file.uploadhash` (which we can eventually deprecate).
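
For concreteness, here is a minimal client-side sketch of the `file.allocate` handshake. It is hypothetical, not the shipped client: it assumes an authenticated `ConduitClient` instance (`$conduit`, construction and auth not shown), and the file name, length, and hash values are purely illustrative.

```
<?php

// Claimed content hash; sha1 here is illustrative, use whatever hash the
// server expects.
$hash = sha1_file('README.md');

$result = $conduit->callMethodSynchronous(
  'file.allocate',
  array(
    'name'          => 'README.md',
    'contentLength' => filesize('README.md'),
    'contentHash'   => $hash,
  ));

if (!$result['upload']) {
  if ($result['filePHID']) {
    // File data already known; a file was created from the hash.
  } else {
    // Server can't accept the file.
  }
} else {
  if ($result['filePHID']) {
    // Chunked upload: query chunks, then upload the missing ones.
  } else {
    // Small file: just upload the data normally.
  }
}
```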

In the last case:

> Client, file.querychunks(): Give me a list of chunks that I should upload.

This returns all the chunks for the file. Chunks have a start byte, an end byte, and a "complete" flag to indicate that the server already has the data.

Then, the client fills in chunks by sending them:

> Client, file.uploadchunk(): Here is the data for one chunk.
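
In sketch form, the chunk loop might look like this (again hypothetical: `$conduit` is an authenticated `ConduitClient`, `$file_phid` comes from the `file.allocate` step, and `$path` is the local file being uploaded):

```
<?php

// Ask the server which chunks it wants, then fill in the incomplete ones.
$chunks = $conduit->callMethodSynchronous(
  'file.querychunks',
  array(
    'filePHID' => $file_phid,
  ));

foreach ($chunks as $chunk) {
  if ($chunk['complete']) {
    // Server already has this chunk (e.g., we're resuming an earlier upload).
    continue;
  }

  $length = $chunk['byteEnd'] - $chunk['byteStart'];
  $data = file_get_contents($path, false, null, $chunk['byteStart'], $length);

  $conduit->callMethodSynchronous(
    'file.uploadchunk',
    array(
      'filePHID'     => $file_phid,
      'byteStart'    => $chunk['byteStart'],
      'data'         => base64_encode($data),
      'dataEncoding' => 'base64',
    ));
}
```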

Some of this doesn't work yet, or has caveats:

  - I haven't tested resume much.
  - Files need an "isPartial()" flag for partial uploads, and the UI needs to respect it.
  - The JS client needs to become chunk-aware.
  - Chunk size is set crazy low to make testing easier.
  - Some debugging flags that I'll remove soon-ish.
  - Downloading works, but still streams the whole file into memory.
  - This storage engine is disabled by default (hardcoded as a unit test engine) because it's still sketchy.
  - Need some code to remove the "isPartial" flag when the last chunk is uploaded.
  - Maybe do checksumming on chunks.

Test Plan:
  - Hacked up `arc upload` (see next diff) to be chunk-aware and uploaded a readme in 18 32-byte chunks. Then downloaded it. Got the same file back that I uploaded.
  - File UI now shows some basic chunk info for chunked files:

{F336434}

Reviewers: btrahan

Reviewed By: btrahan

Subscribers: joshuaspence, epriestley

Maniphest Tasks: T7149

Differential Revision: https://secure.phabricator.com/D12060
epriestley 2015-03-13 11:30:02 -07:00
parent 5339b22751
commit 4aed453b06
13 changed files with 887 additions and 0 deletions


@@ -0,0 +1,9 @@
CREATE TABLE {$NAMESPACE}_file.file_chunk (
id INT UNSIGNED NOT NULL AUTO_INCREMENT PRIMARY KEY,
chunkHandle BINARY(12) NOT NULL,
byteStart BIGINT UNSIGNED NOT NULL,
byteEnd BIGINT UNSIGNED NOT NULL,
dataFilePHID VARBINARY(64),
KEY `key_file` (chunkhandle, byteStart, byteEnd),
KEY `key_data` (dataFilePHID)
) ENGINE=InnoDB, COLLATE {$COLLATE_TEXT};


@@ -747,12 +747,15 @@ phutil_register_library_map(array(
'FeedPublisherWorker' => 'applications/feed/worker/FeedPublisherWorker.php',
'FeedPushWorker' => 'applications/feed/worker/FeedPushWorker.php',
'FeedQueryConduitAPIMethod' => 'applications/feed/conduit/FeedQueryConduitAPIMethod.php',
'FileAllocateConduitAPIMethod' => 'applications/files/conduit/FileAllocateConduitAPIMethod.php',
'FileConduitAPIMethod' => 'applications/files/conduit/FileConduitAPIMethod.php',
'FileCreateMailReceiver' => 'applications/files/mail/FileCreateMailReceiver.php',
'FileDownloadConduitAPIMethod' => 'applications/files/conduit/FileDownloadConduitAPIMethod.php',
'FileInfoConduitAPIMethod' => 'applications/files/conduit/FileInfoConduitAPIMethod.php',
'FileMailReceiver' => 'applications/files/mail/FileMailReceiver.php',
'FileQueryChunksConduitAPIMethod' => 'applications/files/conduit/FileQueryChunksConduitAPIMethod.php',
'FileReplyHandler' => 'applications/files/mail/FileReplyHandler.php',
'FileUploadChunkConduitAPIMethod' => 'applications/files/conduit/FileUploadChunkConduitAPIMethod.php',
'FileUploadConduitAPIMethod' => 'applications/files/conduit/FileUploadConduitAPIMethod.php',
'FileUploadHashConduitAPIMethod' => 'applications/files/conduit/FileUploadHashConduitAPIMethod.php',
'FilesDefaultViewCapability' => 'applications/files/capability/FilesDefaultViewCapability.php',
@@ -1485,6 +1488,7 @@ phutil_register_library_map(array(
'PhabricatorChatLogDAO' => 'applications/chatlog/storage/PhabricatorChatLogDAO.php',
'PhabricatorChatLogEvent' => 'applications/chatlog/storage/PhabricatorChatLogEvent.php',
'PhabricatorChatLogQuery' => 'applications/chatlog/query/PhabricatorChatLogQuery.php',
'PhabricatorChunkedFileStorageEngine' => 'applications/files/engine/PhabricatorChunkedFileStorageEngine.php',
'PhabricatorClusterConfigOptions' => 'applications/config/option/PhabricatorClusterConfigOptions.php',
'PhabricatorCommitBranchesField' => 'applications/repository/customfield/PhabricatorCommitBranchesField.php',
'PhabricatorCommitCustomField' => 'applications/repository/customfield/PhabricatorCommitCustomField.php',
@@ -1790,6 +1794,8 @@ phutil_register_library_map(array(
'PhabricatorFeedStoryReference' => 'applications/feed/storage/PhabricatorFeedStoryReference.php',
'PhabricatorFile' => 'applications/files/storage/PhabricatorFile.php',
'PhabricatorFileBundleLoader' => 'applications/files/query/PhabricatorFileBundleLoader.php',
'PhabricatorFileChunk' => 'applications/files/storage/PhabricatorFileChunk.php',
'PhabricatorFileChunkQuery' => 'applications/files/query/PhabricatorFileChunkQuery.php',
'PhabricatorFileCommentController' => 'applications/files/controller/PhabricatorFileCommentController.php',
'PhabricatorFileComposeController' => 'applications/files/controller/PhabricatorFileComposeController.php',
'PhabricatorFileController' => 'applications/files/controller/PhabricatorFileController.php',
@@ -3908,12 +3914,15 @@ phutil_register_library_map(array(
'FeedPublisherWorker' => 'FeedPushWorker',
'FeedPushWorker' => 'PhabricatorWorker',
'FeedQueryConduitAPIMethod' => 'FeedConduitAPIMethod',
'FileAllocateConduitAPIMethod' => 'FileConduitAPIMethod',
'FileConduitAPIMethod' => 'ConduitAPIMethod',
'FileCreateMailReceiver' => 'PhabricatorMailReceiver',
'FileDownloadConduitAPIMethod' => 'FileConduitAPIMethod',
'FileInfoConduitAPIMethod' => 'FileConduitAPIMethod',
'FileMailReceiver' => 'PhabricatorObjectMailReceiver',
'FileQueryChunksConduitAPIMethod' => 'FileConduitAPIMethod',
'FileReplyHandler' => 'PhabricatorMailReplyHandler',
'FileUploadChunkConduitAPIMethod' => 'FileConduitAPIMethod',
'FileUploadConduitAPIMethod' => 'FileConduitAPIMethod',
'FileUploadHashConduitAPIMethod' => 'FileConduitAPIMethod',
'FilesDefaultViewCapability' => 'PhabricatorPolicyCapability',
@@ -4748,6 +4757,7 @@ phutil_register_library_map(array(
'PhabricatorPolicyInterface',
),
'PhabricatorChatLogQuery' => 'PhabricatorCursorPagedPolicyAwareQuery',
'PhabricatorChunkedFileStorageEngine' => 'PhabricatorFileStorageEngine',
'PhabricatorClusterConfigOptions' => 'PhabricatorApplicationConfigOptions',
'PhabricatorCommitBranchesField' => 'PhabricatorCommitCustomField',
'PhabricatorCommitCustomField' => 'PhabricatorCustomField',
@@ -5081,6 +5091,12 @@ phutil_register_library_map(array(
'PhabricatorPolicyInterface',
'PhabricatorDestructibleInterface',
),
'PhabricatorFileChunk' => array(
'PhabricatorFileDAO',
'PhabricatorPolicyInterface',
'PhabricatorDestructibleInterface',
),
'PhabricatorFileChunkQuery' => 'PhabricatorCursorPagedPolicyAwareQuery',
'PhabricatorFileCommentController' => 'PhabricatorFileController',
'PhabricatorFileComposeController' => 'PhabricatorFileController',
'PhabricatorFileController' => 'PhabricatorController',


@@ -0,0 +1,131 @@
<?php
final class FileAllocateConduitAPIMethod
extends FileConduitAPIMethod {
public function getAPIMethodName() {
return 'file.allocate';
}
public function getMethodDescription() {
return pht('Prepare to upload a file.');
}
public function defineParamTypes() {
return array(
'name' => 'string',
'contentLength' => 'int',
'contentHash' => 'optional string',
'viewPolicy' => 'optional string',
// TODO: Remove this, it's just here to make testing easier.
'forceChunking' => 'optional bool',
);
}
public function defineReturnType() {
return 'map<string, wild>';
}
public function defineErrorTypes() {
return array();
}
protected function execute(ConduitAPIRequest $request) {
$viewer = $request->getUser();
$hash = $request->getValue('contentHash');
$name = $request->getValue('name');
$view_policy = $request->getValue('viewPolicy');
$content_length = $request->getValue('contentLength');
$force_chunking = $request->getValue('forceChunking');
$properties = array(
'name' => $name,
'authorPHID' => $viewer->getPHID(),
'viewPolicy' => $view_policy,
'isExplicitUpload' => true,
);
if ($hash) {
$file = PhabricatorFile::newFileFromContentHash(
$hash,
$properties);
if ($file && !$force_chunking) {
return array(
'upload' => false,
'filePHID' => $file->getPHID(),
);
}
$chunked_hash = PhabricatorChunkedFileStorageEngine::getChunkedHash(
$viewer,
$hash);
$file = id(new PhabricatorFileQuery())
->setViewer($viewer)
->withContentHashes(array($chunked_hash))
->executeOne();
if ($file) {
return array(
'upload' => $file->isPartial(),
'filePHID' => $file->getPHID(),
);
}
}
$engines = PhabricatorFileStorageEngine::loadStorageEngines(
$content_length);
if ($engines) {
if ($force_chunking) {
foreach ($engines as $key => $engine) {
if (!$engine->isChunkEngine()) {
unset($engines[$key]);
}
}
}
// Pick the first engine. If the file is small enough to fit into a
// single engine without chunking, this will be a non-chunk engine and
// we'll just tell the client to upload the file.
$engine = head($engines);
if ($engine) {
if (!$engine->isChunkEngine()) {
return array(
'upload' => true,
'filePHID' => null,
);
}
// Otherwise, this is a large file and we need to perform a chunked
// upload.
$chunk_properties = array();
if ($hash) {
$chunk_properties += array(
'chunkedHash' => $chunked_hash,
);
}
$file = $engine->allocateChunks($content_length, $chunk_properties);
return array(
'upload' => true,
'filePHID' => $file->getPHID(),
);
}
}
// None of the storage engines can accept this file.
return array(
'upload' => false,
'filePHID' => null,
);
}
}


@@ -6,4 +6,104 @@ abstract class FileConduitAPIMethod extends ConduitAPIMethod {
return PhabricatorApplication::getByClass('PhabricatorFilesApplication');
}
protected function loadFileByPHID(PhabricatorUser $viewer, $file_phid) {
$file = id(new PhabricatorFileQuery())
->setViewer($viewer)
->withPHIDs(array($file_phid))
->executeOne();
if (!$file) {
throw new Exception(pht('No such file "%s"!', $file_phid));
}
return $file;
}
protected function loadFileChunks(
PhabricatorUser $viewer,
PhabricatorFile $file) {
return $this->newChunkQuery($viewer, $file)
->execute();
}
protected function loadFileChunkForUpload(
PhabricatorUser $viewer,
PhabricatorFile $file,
$start,
$end) {
$start = (int)$start;
$end = (int)$end;
$chunks = $this->newChunkQuery($viewer, $file)
->withByteRange($start, $end)
->execute();
if (!$chunks) {
throw new Exception(
pht(
'There are no file data chunks in byte range %d - %d.',
$start,
$end));
}
if (count($chunks) !== 1) {
phlog($chunks);
throw new Exception(
pht(
'There are multiple chunks in byte range %d - %d.',
$start,
$end));
}
$chunk = head($chunks);
if ($chunk->getByteStart() != $start) {
throw new Exception(
pht(
'Chunk start byte is %d, not %d.',
$chunk->getByteStart(),
$start));
}
if ($chunk->getByteEnd() != $end) {
throw new Exception(
pht(
'Chunk end byte is %d, not %d.',
$chunk->getByteEnd(),
$end));
}
if ($chunk->getDataFilePHID()) {
throw new Exception(
pht(
'Chunk has already been uploaded.'));
}
return $chunk;
}
protected function decodeBase64($data) {
$data = base64_decode($data, $strict = true);
if ($data === false) {
throw new Exception(pht('Unable to decode base64 data!'));
}
return $data;
}
private function newChunkQuery(
PhabricatorUser $viewer,
PhabricatorFile $file) {
$engine = $file->instantiateStorageEngine();
if (!$engine->isChunkEngine()) {
throw new Exception(
pht(
'File "%s" does not have chunks!',
$file->getPHID()));
}
return id(new PhabricatorFileChunkQuery())
->setViewer($viewer)
->withChunkHandles(array($file->getStorageHandle()));
}
}


@@ -0,0 +1,47 @@
<?php
final class FileQueryChunksConduitAPIMethod
extends FileConduitAPIMethod {
public function getAPIMethodName() {
return 'file.querychunks';
}
public function getMethodDescription() {
return pht('Get information about file chunks.');
}
public function defineParamTypes() {
return array(
'filePHID' => 'phid',
);
}
public function defineReturnType() {
return 'list<wild>';
}
public function defineErrorTypes() {
return array();
}
protected function execute(ConduitAPIRequest $request) {
$viewer = $request->getUser();
$file_phid = $request->getValue('filePHID');
$file = $this->loadFileByPHID($viewer, $file_phid);
$chunks = $this->loadFileChunks($viewer, $file);
$results = array();
foreach ($chunks as $chunk) {
$results[] = array(
'byteStart' => $chunk->getByteStart(),
'byteEnd' => $chunk->getByteEnd(),
'complete' => (bool)$chunk->getDataFilePHID(),
);
}
return $results;
}
}


@@ -0,0 +1,74 @@
<?php
final class FileUploadChunkConduitAPIMethod
extends FileConduitAPIMethod {
public function getAPIMethodName() {
return 'file.uploadchunk';
}
public function getMethodDescription() {
return pht('Upload a chunk of file data to the server.');
}
public function defineParamTypes() {
return array(
'filePHID' => 'phid',
'byteStart' => 'int',
'data' => 'string',
'dataEncoding' => 'string',
);
}
public function defineReturnType() {
return 'void';
}
public function defineErrorTypes() {
return array();
}
protected function execute(ConduitAPIRequest $request) {
$viewer = $request->getUser();
$file_phid = $request->getValue('filePHID');
$file = $this->loadFileByPHID($viewer, $file_phid);
$start = $request->getValue('byteStart');
$data = $request->getValue('data');
$encoding = $request->getValue('dataEncoding');
switch ($encoding) {
case 'base64':
$data = $this->decodeBase64($data);
break;
case null:
break;
default:
throw new Exception(pht('Unsupported data encoding.'));
}
$length = strlen($data);
$chunk = $this->loadFileChunkForUpload(
$viewer,
$file,
$start,
$start + $length);
// NOTE: These files have a view policy which prevents normal access. They
// are only accessed through the storage engine.
$file = PhabricatorFile::newFromFileData(
$data,
array(
'name' => $file->getMonogram().'.chunk-'.$chunk->getID(),
'viewPolicy' => PhabricatorPolicies::POLICY_NOONE,
));
$chunk->setDataFilePHID($file->getPHID())->save();
// TODO: If all chunks are up, mark the file as complete.
return null;
}
}


@@ -3,6 +3,7 @@
final class FileUploadHashConduitAPIMethod extends FileConduitAPIMethod {
public function getAPIMethodName() {
// TODO: Deprecate this in favor of `file.allocate`.
return 'file.uploadhash';
}


@@ -295,6 +295,67 @@ final class PhabricatorFileInfoController extends PhabricatorFileController {
$box->addPropertyList($media);
}
$engine = null;
try {
$engine = $file->instantiateStorageEngine();
} catch (Exception $ex) {
// Don't bother raising this anywhere for now.
}
if ($engine) {
if ($engine->isChunkEngine()) {
$chunkinfo = new PHUIPropertyListView();
$box->addPropertyList($chunkinfo, pht('Chunks'));
$chunks = id(new PhabricatorFileChunkQuery())
->setViewer($user)
->withChunkHandles(array($file->getStorageHandle()))
->execute();
$chunks = msort($chunks, 'getByteStart');
$rows = array();
$completed = array();
foreach ($chunks as $chunk) {
$is_complete = $chunk->getDataFilePHID();
$rows[] = array(
$chunk->getByteStart(),
$chunk->getByteEnd(),
($is_complete ? pht('Yes') : pht('No')),
);
if ($is_complete) {
$completed[] = $chunk;
}
}
$table = id(new AphrontTableView($rows))
->setHeaders(
array(
pht('Offset'),
pht('End'),
pht('Complete'),
))
->setColumnClasses(
array(
'',
'',
'wide',
));
$chunkinfo->addProperty(
pht('Total Chunks'),
count($chunks));
$chunkinfo->addProperty(
pht('Completed Chunks'),
count($completed));
$chunkinfo->addRawContent($table);
}
}
}
}


@@ -0,0 +1,171 @@
<?php
final class PhabricatorChunkedFileStorageEngine
extends PhabricatorFileStorageEngine {
public function getEngineIdentifier() {
return 'chunks';
}
public function getEnginePriority() {
return 60000;
}
/**
* We can write chunks if we have at least one valid storage engine
* underneath us.
*
* This engine must not also be a chunk engine.
*/
public function canWriteFiles() {
return (bool)$this->getWritableEngine();
}
public function hasFilesizeLimit() {
return false;
}
public function isChunkEngine() {
return true;
}
public function isTestEngine() {
// TODO: For now, prevent this from actually being selected.
return true;
}
public function writeFile($data, array $params) {
// The chunk engine does not support direct writes.
throw new PhutilMethodNotImplementedException();
}
public function readFile($handle) {
// This is inefficient, but makes the API work as expected.
$chunks = $this->loadAllChunks($handle, true);
$buffer = '';
foreach ($chunks as $chunk) {
$data_file = $chunk->getDataFile();
if (!$data_file) {
throw new Exception(pht('This file data is incomplete!'));
}
$buffer .= $chunk->getDataFile()->loadFileData();
}
return $buffer;
}
public function deleteFile($handle) {
$engine = new PhabricatorDestructionEngine();
// Data files are not needed here; each chunk destroys its own data file.
$chunks = $this->loadAllChunks($handle, false);
foreach ($chunks as $chunk) {
$engine->destroyObject($chunk);
}
}
private function loadAllChunks($handle, $need_files) {
$chunks = id(new PhabricatorFileChunkQuery())
->setViewer(PhabricatorUser::getOmnipotentUser())
->withChunkHandles(array($handle))
->needDataFiles($need_files)
->execute();
$chunks = msort($chunks, 'getByteStart');
return $chunks;
}
/**
* Compute a chunked file hash for the viewer.
*
* We can not currently compute a real hash for chunked file uploads (because
* no process sees all of the file data).
*
* We also can not trust the hash that the user claims to have computed. If
* we trust the user, they can upload some `evil.exe` and claim it has the
* same file hash as `good.exe`. When another user later uploads the real
* `good.exe`, we'll just create a reference to the existing `evil.exe`. Users
* who download `good.exe` will then receive `evil.exe`.
*
* Instead, we rehash the user's claimed hash with account secrets. This
* allows users to resume file uploads, but not collide with other users.
*
* Ideally, we'd like to be able to verify hashes, but this is complicated
* and time consuming and gives us a fairly small benefit.
*
* @param PhabricatorUser Viewing user.
* @param string Claimed file hash.
* @return string Rehashed file hash.
*/
public static function getChunkedHash(PhabricatorUser $viewer, $hash) {
if (!$viewer->getPHID()) {
throw new Exception(
pht('Unable to compute chunked hash without real viewer!'));
}
$input = $viewer->getAccountSecret().':'.$hash.':'.$viewer->getPHID();
return PhabricatorHash::digest($input);
}
public function allocateChunks($length, array $properties) {
$file = PhabricatorFile::newChunkedFile($this, $length, $properties);
$chunk_size = $this->getChunkSize();
$handle = $file->getStorageHandle();
$chunks = array();
for ($ii = 0; $ii < $length; $ii += $chunk_size) {
$chunks[] = PhabricatorFileChunk::initializeNewChunk(
$handle,
$ii,
min($ii + $chunk_size, $length));
}
$file->openTransaction();
foreach ($chunks as $chunk) {
$chunk->save();
}
$file->save();
$file->saveTransaction();
return $file;
}
private function getWritableEngine() {
// NOTE: We can't just load writable engines or we'll loop forever.
$engines = PhabricatorFileStorageEngine::loadAllEngines();
foreach ($engines as $engine) {
if ($engine->isChunkEngine()) {
continue;
}
if ($engine->isTestEngine()) {
continue;
}
if (!$engine->canWriteFiles()) {
continue;
}
if ($engine->hasFilesizeLimit()) {
if ($engine->getFilesizeLimit() < $this->getChunkSize()) {
continue;
}
}
return true;
}
return false;
}
private function getChunkSize() {
// TODO: This is an artificially small size to make it easier to
// test chunking.
return 32;
}
}


@@ -113,6 +113,21 @@ abstract class PhabricatorFileStorageEngine {
}
/**
* Identifies chunking storage engines.
*
* If this is a storage engine which splits files into chunks and stores the
* chunks in other engines, it can return `true` to signal that other
* chunking engines should not try to store data here.
*
* @return bool True if this is a chunk engine.
* @task meta
*/
public function isChunkEngine() {
return false;
}
/* -( Managing File Data )------------------------------------------------- */


@@ -0,0 +1,116 @@
<?php
final class PhabricatorFileChunkQuery
extends PhabricatorCursorPagedPolicyAwareQuery {
private $chunkHandles;
private $rangeStart;
private $rangeEnd;
private $needDataFiles;
public function withChunkHandles(array $handles) {
$this->chunkHandles = $handles;
return $this;
}
public function withByteRange($start, $end) {
$this->rangeStart = $start;
$this->rangeEnd = $end;
return $this;
}
public function needDataFiles($need) {
$this->needDataFiles = $need;
return $this;
}
protected function loadPage() {
$table = new PhabricatorFileChunk();
$conn_r = $table->establishConnection('r');
$data = queryfx_all(
$conn_r,
'SELECT * FROM %T %Q %Q %Q',
$table->getTableName(),
$this->buildWhereClause($conn_r),
$this->buildOrderClause($conn_r),
$this->buildLimitClause($conn_r));
return $table->loadAllFromArray($data);
}
protected function willFilterPage(array $chunks) {
if ($this->needDataFiles) {
$file_phids = mpull($chunks, 'getDataFilePHID');
$file_phids = array_filter($file_phids);
if ($file_phids) {
$files = id(new PhabricatorFileQuery())
->setViewer($this->getViewer())
->setParentQuery($this)
->withPHIDs($file_phids)
->execute();
$files = mpull($files, null, 'getPHID');
} else {
$files = array();
}
foreach ($chunks as $key => $chunk) {
$data_phid = $chunk->getDataFilePHID();
if (!$data_phid) {
$chunk->attachDataFile(null);
continue;
}
$file = idx($files, $data_phid);
if (!$file) {
unset($chunks[$key]);
$this->didRejectResult($chunk);
continue;
}
$chunk->attachDataFile($file);
}
if (!$chunks) {
return $chunks;
}
}
return $chunks;
}
private function buildWhereClause(AphrontDatabaseConnection $conn_r) {
$where = array();
if ($this->chunkHandles !== null) {
$where[] = qsprintf(
$conn_r,
'chunkHandle IN (%Ls)',
$this->chunkHandles);
}
if ($this->rangeStart !== null) {
$where[] = qsprintf(
$conn_r,
'byteEnd > %d',
$this->rangeStart);
}
if ($this->rangeEnd !== null) {
$where[] = qsprintf(
$conn_r,
'byteStart < %d',
$this->rangeEnd);
}
$where[] = $this->buildPagingClause($conn_r);
return $this->formatWhereClause($where);
}
public function getQueryApplicationClass() {
return 'PhabricatorFilesApplication';
}
}


@@ -33,6 +33,7 @@ final class PhabricatorFile extends PhabricatorFileDAO
const METADATA_IMAGE_HEIGHT = 'height';
const METADATA_CAN_CDN = 'canCDN';
const METADATA_BUILTIN = 'builtin';
const METADATA_PARTIAL = 'partial';
protected $name;
protected $mimeType;
@@ -264,6 +265,41 @@ final class PhabricatorFile extends PhabricatorFileDAO
return $file;
}
public static function newChunkedFile(
PhabricatorFileStorageEngine $engine,
$length,
array $params) {
$file = PhabricatorFile::initializeNewFile();
$file->setByteSize($length);
// TODO: We might be able to test the first chunk in order to figure
// this out more reliably, since MIME detection usually examines headers.
// However, enormous files are probably always either actually raw data
// or reasonable to treat like raw data.
$file->setMimeType('application/octet-stream');
$chunked_hash = idx($params, 'chunkedHash');
if ($chunked_hash) {
$file->setContentHash($chunked_hash);
} else {
// See PhabricatorChunkedFileStorageEngine::getChunkedHash() for some
// discussion of this.
$file->setContentHash(
PhabricatorHash::digest(
Filesystem::readRandomBytes(64)));
}
$file->setStorageEngine($engine->getEngineIdentifier());
$file->setStorageHandle(PhabricatorFileChunk::newChunkHandle());
$file->setStorageFormat(self::STORAGE_FORMAT_RAW);
$file->readPropertiesFromParameters($params);
return $file;
}
private static function buildFromFileData($data, array $params = array()) {
if (isset($params['storageEngines'])) {
@@ -1134,6 +1170,11 @@ final class PhabricatorFile extends PhabricatorFileDAO
->setURI($uri);
}
public function isPartial() {
// TODO: Placeholder for resumable uploads.
return false;
}
/* -( PhabricatorApplicationTransactionInterface )------------------------- */


@@ -0,0 +1,105 @@
<?php
final class PhabricatorFileChunk extends PhabricatorFileDAO
implements
PhabricatorPolicyInterface,
PhabricatorDestructibleInterface {
protected $chunkHandle;
protected $byteStart;
protected $byteEnd;
protected $dataFilePHID;
private $dataFile = self::ATTACHABLE;
protected function getConfiguration() {
return array(
self::CONFIG_TIMESTAMPS => false,
self::CONFIG_COLUMN_SCHEMA => array(
'chunkHandle' => 'bytes12',
'byteStart' => 'uint64',
'byteEnd' => 'uint64',
'dataFilePHID' => 'phid?',
),
self::CONFIG_KEY_SCHEMA => array(
'key_file' => array(
'columns' => array('chunkHandle', 'byteStart', 'byteEnd'),
),
'key_data' => array(
'columns' => array('dataFilePHID'),
),
),
) + parent::getConfiguration();
}
public static function newChunkHandle() {
$seed = Filesystem::readRandomBytes(64);
return PhabricatorHash::digestForIndex($seed);
}
public static function initializeNewChunk($handle, $start, $end) {
return id(new PhabricatorFileChunk())
->setChunkHandle($handle)
->setByteStart($start)
->setByteEnd($end);
}
public function attachDataFile(PhabricatorFile $file = null) {
$this->dataFile = $file;
return $this;
}
public function getDataFile() {
return $this->assertAttached($this->dataFile);
}
/* -( PhabricatorPolicyInterface )----------------------------------------- */
public function getCapabilities() {
return array(
PhabricatorPolicyCapability::CAN_VIEW,
);
}
public function getPolicy($capability) {
// These objects are low-level and only accessed through the storage
// engine, so policies are mostly just in place to let us use the common
// query infrastructure.
return PhabricatorPolicies::getMostOpenPolicy();
}
public function hasAutomaticCapability($capability, PhabricatorUser $viewer) {
return false;
}
public function describeAutomaticCapability($capability) {
return null;
}
/* -( PhabricatorDestructibleInterface )----------------------------------- */
public function destroyObjectPermanently(
PhabricatorDestructionEngine $engine) {
$data_phid = $this->getDataFilePHID();
if ($data_phid) {
$data_file = id(new PhabricatorFileQuery())
->setViewer(PhabricatorUser::getOmnipotentUser())
->withPHIDs(array($data_phid))
->executeOne();
if ($data_file) {
$engine->destroyObject($data_file);
}
}
$this->delete();
}
}