Mirror of https://we.phorge.it/source/phorge.git
6b69bc3fbb
Summary: Ref T7149. This was just to make testing easier, but chunking substantially works now.

Test Plan: `grep`

Reviewers: btrahan

Reviewed By: btrahan

Subscribers: epriestley

Maniphest Tasks: T7149

Differential Revision: https://secure.phabricator.com/D12076
141 lines · 3.8 KiB · PHP
<?php

final class FileAllocateConduitAPIMethod
  extends FileConduitAPIMethod {

  public function getAPIMethodName() {
    return 'file.allocate';
  }

  public function getMethodDescription() {
    return pht('Prepare to upload a file.');
  }

  public function defineParamTypes() {
    return array(
      'name' => 'string',
      'contentLength' => 'int',
      'contentHash' => 'optional string',
      'viewPolicy' => 'optional string',
    );
  }

  public function defineReturnType() {
    return 'map<string, wild>';
  }

  public function defineErrorTypes() {
    return array();
  }

  protected function execute(ConduitAPIRequest $request) {
    $viewer = $request->getUser();

    $hash = $request->getValue('contentHash');
    $name = $request->getValue('name');
    $view_policy = $request->getValue('viewPolicy');
    $length = $request->getValue('contentLength');

    $properties = array(
      'name' => $name,
      'authorPHID' => $viewer->getPHID(),
      'viewPolicy' => $view_policy,
      'isExplicitUpload' => true,
    );

    $file = null;
    if ($hash) {
      $file = PhabricatorFile::newFileFromContentHash(
        $hash,
        $properties);
    }

    if ($hash && !$file) {
      $chunked_hash = PhabricatorChunkedFileStorageEngine::getChunkedHash(
        $viewer,
        $hash);
      $file = id(new PhabricatorFileQuery())
        ->setViewer($viewer)
        ->withContentHashes(array($chunked_hash))
        ->executeOne();
    }

    if (strlen($name) && !$hash && !$file) {
      if ($length > PhabricatorFileStorageEngine::getChunkThreshold()) {
        // If we don't have a hash, but this file is large enough to store in
        // chunks and thus may be resumable, try to find a partially uploaded
        // file by the same author with the same name and same length. This
        // allows us to resume uploads in Javascript where we can't efficiently
        // compute file hashes.
        $file = id(new PhabricatorFileQuery())
          ->setViewer($viewer)
          ->withAuthorPHIDs(array($viewer->getPHID()))
          ->withNames(array($name))
          ->withLengthBetween($length, $length)
          ->withIsPartial(true)
          ->setLimit(1)
          ->executeOne();
      }
    }

    if ($file) {
      return array(
        'upload' => (bool)$file->getIsPartial(),
        'filePHID' => $file->getPHID(),
      );
    }

    $engines = PhabricatorFileStorageEngine::loadStorageEngines($length);
    if ($engines) {

      // Pick the first engine. If the file is small enough to fit into a
      // single engine without chunking, this will be a non-chunk engine and
      // we'll just tell the client to upload the file.
      $engine = head($engines);
      if ($engine) {
        if (!$engine->isChunkEngine()) {
          return array(
            'upload' => true,
            'filePHID' => null,
          );
        }

        // Otherwise, this is a large file and we need to perform a chunked
        // upload.

        $chunk_properties = $properties;

        if ($hash) {
          $chunk_properties += array(
            'chunkedHash' => $chunked_hash,
          );
        }

        $file = $engine->allocateChunks($length, $chunk_properties);

        return array(
          'upload' => true,
          'filePHID' => $file->getPHID(),
        );
      }
    }

    // None of the storage engines can accept this file.
    if (PhabricatorFileStorageEngine::loadWritableEngines()) {
      $error = pht(
        'Unable to upload file: this file is too large for any '.
        'configured storage engine.');
    } else {
      $error = pht(
        'Unable to upload file: the server is not configured with any '.
        'writable storage engines.');
    }

    return array(
      'upload' => false,
      'filePHID' => null,
      'error' => $error,
    );
  }

}
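
For orientation, here is a rough client-side sketch of how `file.allocate` might be driven over Conduit. It assumes a libphutil environment where `ConduitClient` is available; the install URI, API token, and file path are placeholders, and error handling is intentionally minimal. It is not the upload client shipped with Arcanist, just an illustration of the allocate-then-decide flow.

<?php

// Sketch only: the install URI, token, and path below are placeholders.
$client = new ConduitClient('https://phabricator.example.com/api/');
$client->setConduitToken('api-xxxxxxxxxxxxxxxxxxxx');

$path = 'large-upload.bin';
$length = filesize($path);

// Ask the server whether (and how) this file needs to be uploaded before
// sending any bytes. 'contentHash' is optional and omitted here.
$result = $client->callMethodSynchronous(
  'file.allocate',
  array(
    'name' => basename($path),
    'contentLength' => $length,
  ));

if (!$result['upload']) {
  // Nothing to send: either the data already exists on the server
  // ('filePHID' is set) or no storage engine can accept it ('error' is set).
} else if ($result['filePHID'] !== null) {
  // A chunked upload was allocated (or a partial upload can be resumed);
  // send the content in pieces against the returned file PHID.
} else {
  // Small file: upload the whole body in a single request instead.
}

The value of the two response fields is that the client can choose between doing nothing, continuing a chunked upload against an existing PHID, or sending the whole file at once, all without transferring any content first.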