
Support resuming JS uploads of chunked files

Summary: Ref T7149. We can't compute hashes of large files efficiently, but we can resume partially completed uploads by the same author with the same name and file size. This seems like a reasonable heuristic that is unlikely to ever misfire, even if it's a little magical.

Test Plan:
  - Forced chunking on.
  - Started uploading a chunked file.
  - Closed the browser window.
  - Dropped the same file into a new window.
  - Upload resumed //(!!!)//
  - Did this again.
  - Downloaded the final file, which successfully reconstructed the original file.

Reviewers: btrahan

Reviewed By: btrahan

Subscribers: joshuaspence, chad, epriestley

Maniphest Tasks: T7149

Differential Revision: https://secure.phabricator.com/D12070
Commit: 32d8d67535 (parent aa909ba072)
Author: epriestley
Date: 2015-03-14 08:28:46 -07:00

5 changed files with 101 additions and 30 deletions
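
For orientation before the per-file hunks, here is a minimal client-side sketch of the allocation flow this change extends. It is not part of the diff: the file.allocate parameters and the upload/filePHID response fields appear in the hunks below, while the surrounding ConduitCall usage, $viewer, the example file name and size, and the resume_chunk_upload() helper are illustrative assumptions.

  // Hypothetical caller: ask the server to allocate (or find) a file for an
  // upload. If a partial upload by the same author with the same name and
  // byte size already exists, the server now hands back its PHID so the
  // client can resume instead of starting over.
  $result = id(new ConduitCall(
    'file.allocate',
    array(
      'name' => 'backup.tar.gz',            // illustrative name
      'contentLength' => 64 * 1024 * 1024,  // illustrative size
      'contentHash' => null,                // JS can't hash large files cheaply
    )))
    ->setUser($viewer)
    ->execute();

  if ($result['upload']) {
    // New allocation or resumable partial file: upload the remaining chunks
    // to the returned PHID.
    resume_chunk_upload($result['filePHID']);  // hypothetical helper
  } else {
    // The server already has the complete content; nothing to send.
    echo $result['filePHID'];
  }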


@@ -44,7 +44,7 @@ return array(
'rsrc/css/application/config/config-welcome.css' => '6abd79be',
'rsrc/css/application/config/setup-issue.css' => '22270af2',
'rsrc/css/application/config/unhandled-exception.css' => '37d4f9a2',
'rsrc/css/application/conpherence/durable-column.css' => '1f5c64e8',
'rsrc/css/application/conpherence/durable-column.css' => '8c951609',
'rsrc/css/application/conpherence/menu.css' => 'c6ac5299',
'rsrc/css/application/conpherence/message-pane.css' => '5930260a',
'rsrc/css/application/conpherence/notification.css' => '04a6e10a',
@@ -513,7 +513,7 @@ return array(
'changeset-view-manager' => '88be0133',
'config-options-css' => '7fedf08b',
'config-welcome-css' => '6abd79be',
'conpherence-durable-column-view' => '1f5c64e8',
'conpherence-durable-column-view' => '8c951609',
'conpherence-menu-css' => 'c6ac5299',
'conpherence-message-pane-css' => '5930260a',
'conpherence-notification-css' => '04a6e10a',


@@ -37,7 +37,7 @@ final class FileAllocateConduitAPIMethod
$hash = $request->getValue('contentHash');
$name = $request->getValue('name');
$view_policy = $request->getValue('viewPolicy');
$content_length = $request->getValue('contentLength');
$length = $request->getValue('contentLength');
$force_chunking = $request->getValue('forceChunking');
@@ -48,18 +48,14 @@ final class FileAllocateConduitAPIMethod
'isExplicitUpload' => true,
);
$file = null;
if ($hash) {
$file = PhabricatorFile::newFileFromContentHash(
$hash,
$properties);
}
if ($file) {
return array(
'upload' => false,
'filePHID' => $file->getPHID(),
);
}
if ($hash && !$file) {
$chunked_hash = PhabricatorChunkedFileStorageEngine::getChunkedHash(
$viewer,
$hash);
@@ -67,17 +63,34 @@ final class FileAllocateConduitAPIMethod
->setViewer($viewer)
->withContentHashes(array($chunked_hash))
->executeOne();
}
if ($file) {
return array(
'upload' => (bool)$file->getIsPartial(),
'filePHID' => $file->getPHID(),
);
if (strlen($name) && !$hash && !$file) {
if ($length > PhabricatorFileStorageEngine::getChunkThreshold()) {
// If we don't have a hash, but this file is large enough to store in
// chunks and thus may be resumable, try to find a partially uploaded
// file by the same author with the same name and same length. This
// allows us to resume uploads in Javascript where we can't efficiently
// compute file hashes.
$file = id(new PhabricatorFileQuery())
->setViewer($viewer)
->withAuthorPHIDs(array($viewer->getPHID()))
->withNames(array($name))
->withLengthBetween($length, $length)
->withIsPartial(true)
->setLimit(1)
->executeOne();
}
}
$engines = PhabricatorFileStorageEngine::loadStorageEngines(
$content_length);
if ($file) {
return array(
'upload' => (bool)$file->getIsPartial(),
'filePHID' => $file->getPHID(),
);
}
$engines = PhabricatorFileStorageEngine::loadStorageEngines($length);
if ($engines) {
if ($force_chunking) {
@@ -111,7 +124,7 @@ final class FileAllocateConduitAPIMethod
);
}
$file = $engine->allocateChunks($content_length, $chunk_properties);
$file = $engine->allocateChunks($length, $chunk_properties);
return array(
'upload' => true,


@@ -105,7 +105,16 @@ final class PhabricatorChunkedFileStorageEngine
}
$input = $viewer->getAccountSecret().':'.$hash.':'.$viewer->getPHID();
return PhabricatorHash::digest($input);
return self::getChunkedHashForInput($input);
}
public static function getChunkedHashForInput($input) {
$rehash = PhabricatorHash::digest($input);
// Add a suffix to identify this as a chunk hash.
$rehash = substr($rehash, 0, -2).'-C';
return $rehash;
}
public function allocateChunks($length, array $properties) {
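
To make the suffix scheme concrete, here is a small standalone illustration. It is an assumption-laden sketch: sha1() stands in for PhabricatorHash::digest() (which is an HMAC keyed with the install's secret), and the input string is only an example of the secret/hash/PHID input built above.

  // Illustration only: produce a 40-character "chunked hash" whose last two
  // characters are replaced by '-C'. Ordinary content hashes are pure hex,
  // so a value ending in '-C' occupies its own namespace and cannot collide
  // with one.
  function example_chunked_hash_for_input($input) {
    $rehash = sha1($input);              // 40 hex characters, like the digest
    return substr($rehash, 0, -2).'-C';  // keep the length, mark the namespace
  }

  echo example_chunked_hash_for_input('account-secret:somehash:PHID-USER-xx');
  // Prints 38 hex characters followed by "-C".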


@@ -11,6 +11,10 @@ final class PhabricatorFileQuery
private $dateCreatedAfter;
private $dateCreatedBefore;
private $contentHashes;
private $minLength;
private $maxLength;
private $names;
private $isPartial;
public function withIDs(array $ids) {
$this->ids = $ids;
@@ -89,6 +93,22 @@ final class PhabricatorFileQuery
return $this;
}
public function withLengthBetween($min, $max) {
$this->minLength = $min;
$this->maxLength = $max;
return $this;
}
public function withNames(array $names) {
$this->names = $names;
return $this;
}
public function withIsPartial($partial) {
$this->isPartial = $partial;
return $this;
}
public function showOnlyExplicitUploads($explicit_uploads) {
$this->explicitUploads = $explicit_uploads;
return $this;
@@ -213,34 +233,34 @@ final class PhabricatorFileQuery
$where[] = $this->buildPagingClause($conn_r);
if ($this->ids) {
if ($this->ids !== null) {
$where[] = qsprintf(
$conn_r,
'f.id IN (%Ld)',
$this->ids);
}
if ($this->phids) {
if ($this->phids !== null) {
$where[] = qsprintf(
$conn_r,
'f.phid IN (%Ls)',
$this->phids);
}
if ($this->authorPHIDs) {
if ($this->authorPHIDs !== null) {
$where[] = qsprintf(
$conn_r,
'f.authorPHID IN (%Ls)',
$this->authorPHIDs);
}
if ($this->explicitUploads) {
if ($this->explicitUploads !== null) {
$where[] = qsprintf(
$conn_r,
'f.isExplicitUpload = true');
}
if ($this->transforms) {
if ($this->transforms !== null) {
$clauses = array();
foreach ($this->transforms as $transform) {
if ($transform['transform'] === true) {
@@ -259,27 +279,55 @@ final class PhabricatorFileQuery
$where[] = qsprintf($conn_r, '(%Q)', implode(') OR (', $clauses));
}
if ($this->dateCreatedAfter) {
if ($this->dateCreatedAfter !== null) {
$where[] = qsprintf(
$conn_r,
'f.dateCreated >= %d',
$this->dateCreatedAfter);
}
if ($this->dateCreatedBefore) {
if ($this->dateCreatedBefore !== null) {
$where[] = qsprintf(
$conn_r,
'f.dateCreated <= %d',
$this->dateCreatedBefore);
}
if ($this->contentHashes) {
if ($this->contentHashes !== null) {
$where[] = qsprintf(
$conn_r,
'f.contentHash IN (%Ls)',
$this->contentHashes);
}
if ($this->minLength !== null) {
$where[] = qsprintf(
$conn_r,
'byteSize >= %d',
$this->minLength);
}
if ($this->maxLength !== null) {
$where[] = qsprintf(
$conn_r,
'byteSize <= %d',
$this->maxLength);
}
if ($this->names !== null) {
$where[] = qsprintf(
$conn_r,
'name IN (%Ls)',
$this->names);
}
if ($this->isPartial !== null) {
$where[] = qsprintf(
$conn_r,
'isPartial = %d',
(int)$this->isPartial);
}
return $this->formatWhereClause($where);
}
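
Two things happen in this file: new withLengthBetween()/withNames()/withIsPartial() constraints back the resume lookup, and the existing truthy checks become explicit "!== null" checks, presumably so an explicitly supplied empty constraint is not silently treated as no constraint at all. The sketch below, with an assumed $viewer, shows the distinction.

  $query = id(new PhabricatorFileQuery())
    ->setViewer($viewer);

  // Never calling withAuthorPHIDs() leaves the property null, so no author
  // clause is added and the query is intentionally unconstrained by author.

  // With the old truthy check, passing an empty list was indistinguishable
  // from not constraining at all, and quietly matched files by any author:
  $query->withAuthorPHIDs(array());

  // With "!== null", the empty list still reaches the WHERE construction, so
  // it can no longer be silently widened into "all authors".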


@@ -292,9 +292,10 @@ final class PhabricatorFile extends PhabricatorFileDAO
} else {
// See PhabricatorChunkedFileStorageEngine::getChunkedHash() for some
// discussion of this.
$file->setContentHash(
PhabricatorHash::digest(
Filesystem::readRandomBytes(64)));
$seed = Filesystem::readRandomBytes(64);
$hash = PhabricatorChunkedFileStorageEngine::getChunkedHashForInput(
$seed);
$file->setContentHash($hash);
}
$file->setStorageEngine($engine->getEngineIdentifier());