@recaptime-dev's working patches + fork for Phorge, a community fork of Phabricator. (Upstream dev and stable branches are at upstream/main and upstream/stable respectively.) hq.recaptime.dev/wiki/Phorge
phorge phabricator
at upstream/main 197 lines 5.4 kB view raw
<?php

/**
 * Storage engine which stores large files as a sequence of smaller chunks.
 *
 * This engine holds no file data itself: each chunk's data lives in some
 * other (non-chunk) writable engine, and this engine reassembles the chunks
 * on read.
 */
final class PhabricatorChunkedFileStorageEngine
  extends PhabricatorFileStorageEngine {

  public function getEngineIdentifier() {
    return 'chunks';
  }

  public function getEnginePriority() {
    return 60000;
  }

  /**
   * We can write chunks if we have at least one valid storage engine
   * underneath us.
   */
  public function canWriteFiles() {
    return (bool)$this->getWritableEngine();
  }

  public function hasFilesizeLimit() {
    // Chunking is specifically how large files escape per-engine size limits.
    return false;
  }

  public function isChunkEngine() {
    return true;
  }

  public function writeFile($data, array $params) {
    // The chunk engine does not support direct writes.
    throw new PhutilMethodNotImplementedException();
  }

  /**
   * Read a chunked file by concatenating the data of all of its chunks.
   *
   * This materializes the whole file in memory, which is inefficient for
   * large files, but makes the API work as expected.
   *
   * @throws Exception If any chunk is missing its underlying data file.
   */
  public function readFile($handle) {
    $chunks = $this->loadAllChunks($handle, true);

    $buffer = '';
    foreach ($chunks as $chunk) {
      $data_file = $chunk->getDataFile();
      if (!$data_file) {
        throw new Exception(pht('This file data is incomplete!'));
      }

      // Use the data file we just checked; re-fetching it from the chunk
      // would be a redundant second call.
      $buffer .= $data_file->loadFileData();
    }

    return $buffer;
  }

  /**
   * Destroy every chunk backing the given storage handle.
   */
  public function deleteFile($handle) {
    $engine = new PhabricatorDestructionEngine();
    $chunks = $this->loadAllChunks($handle, true);
    foreach ($chunks as $chunk) {
      $engine->destroyObject($chunk);
    }
  }

  /**
   * Load all chunks for a storage handle, sorted by byte offset.
   *
   * @param string $handle Chunked file storage handle.
   * @param bool $need_files True to also attach each chunk's data file.
   * @return list<PhabricatorFileChunk> Chunks ordered by byte start.
   */
  private function loadAllChunks($handle, $need_files) {
    $chunks = id(new PhabricatorFileChunkQuery())
      ->setViewer(PhabricatorUser::getOmnipotentUser())
      ->withChunkHandles(array($handle))
      ->needDataFiles($need_files)
      ->execute();

    $chunks = msort($chunks, 'getByteStart');

    return $chunks;
  }

  /**
   * Compute a chunked file hash for the viewer.
   *
   * We can not currently compute a real hash for chunked file uploads (because
   * no process sees all of the file data).
   *
   * We also can not trust the hash that the user claims to have computed. If
   * we trust the user, they can upload some `evil.exe` and claim it has the
   * same file hash as `good.exe`. When another user later uploads the real
   * `good.exe`, we'll just create a reference to the existing `evil.exe`. Users
   * who download `good.exe` will then receive `evil.exe`.
   *
   * Instead, we rehash the user's claimed hash with account secrets. This
   * allows users to resume file uploads, but not collide with other users.
   *
   * Ideally, we'd like to be able to verify hashes, but this is complicated
   * and time consuming and gives us a fairly small benefit.
   *
   * @param PhabricatorUser $viewer Viewing user.
   * @param string $hash Claimed file hash.
   * @return string Rehashed file hash.
   */
  public static function getChunkedHash(PhabricatorUser $viewer, $hash) {
    if (!$viewer->getPHID()) {
      throw new Exception(
        pht('Unable to compute chunked hash without real viewer!'));
    }

    $input = $viewer->getAccountSecret().':'.$hash.':'.$viewer->getPHID();
    return self::getChunkedHashForInput($input);
  }

  /**
   * Digest an input string into the form used for chunked file hashes.
   *
   * @param string $input Raw input to digest.
   * @return string Digest with a `-C` suffix marking it as a chunk hash.
   */
  public static function getChunkedHashForInput($input) {
    $rehash = PhabricatorHash::weakDigest($input);

    // Add a suffix to identify this as a chunk hash. We trade the last two
    // digest characters for the marker so the overall length is unchanged.
    $rehash = substr($rehash, 0, -2).'-C';

    return $rehash;
  }

  /**
   * Create a new chunked file and allocate its (initially empty) chunk
   * records, one per `getChunkSize()` bytes of file length.
   *
   * @param int $length Total file size, in bytes.
   * @param map<string, wild> $properties Properties for the new file.
   * @return PhabricatorFile Newly created chunked file.
   */
  public function allocateChunks($length, array $properties) {
    $file = PhabricatorFile::newChunkedFile($this, $length, $properties);

    $chunk_size = $this->getChunkSize();

    $handle = $file->getStorageHandle();

    $chunks = array();
    for ($ii = 0; $ii < $length; $ii += $chunk_size) {
      // The final chunk may be short; clamp its end to the file length.
      $chunks[] = PhabricatorFileChunk::initializeNewChunk(
        $handle,
        $ii,
        min($ii + $chunk_size, $length));
    }

    $file->openTransaction();
    foreach ($chunks as $chunk) {
      $chunk->save();
    }
    $file->saveAndIndex();
    $file->saveTransaction();

    return $file;
  }

  /**
   * Find a storage engine which is suitable for storing chunks.
   *
   * This engine must be a writable engine, have a filesize limit larger than
   * the chunk limit, and must not be a chunk engine itself.
   *
   * @return bool True if at least one suitable engine exists. (Despite the
   *   name, this returns a boolean, not an engine object.)
   */
  private function getWritableEngine() {
    // NOTE: We can't just load writable engines or we'll loop forever.
    $engines = parent::loadAllEngines();

    foreach ($engines as $engine) {
      if ($engine->isChunkEngine()) {
        continue;
      }

      if ($engine->isTestEngine()) {
        continue;
      }

      if (!$engine->canWriteFiles()) {
        continue;
      }

      if ($engine->hasFilesizeLimit()) {
        if ($engine->getFilesizeLimit() < $this->getChunkSize()) {
          continue;
        }
      }

      return true;
    }

    return false;
  }

  /**
   * @return int Size of each chunk, in bytes (4MB).
   */
  public function getChunkSize() {
    return (4 * 1024 * 1024);
  }

  public function getRawFileDataIterator(
    PhabricatorFile $file,
    $begin,
    $end,
    PhabricatorFileStorageFormat $format) {

    // NOTE: It is currently impossible for files stored with the chunk
    // engine to have their own formatting (instead, the individual chunks
    // are formatted), so we ignore the format object.

    $chunks = id(new PhabricatorFileChunkQuery())
      ->setViewer(PhabricatorUser::getOmnipotentUser())
      ->withChunkHandles(array($file->getStorageHandle()))
      ->withByteRange($begin, $end)
      ->needDataFiles(true)
      ->execute();

    return new PhabricatorFileChunkIterator($chunks, $begin, $end);
  }

}