D15375.id37079.diff

diff --git a/src/applications/harbormaster/storage/HarbormasterSchemaSpec.php b/src/applications/harbormaster/storage/HarbormasterSchemaSpec.php
--- a/src/applications/harbormaster/storage/HarbormasterSchemaSpec.php
+++ b/src/applications/harbormaster/storage/HarbormasterSchemaSpec.php
@@ -24,7 +24,7 @@
$this->buildRawSchema(
id(new HarbormasterBuildable())->getApplicationName(),
- 'harbormaster_buildlogchunk',
+ HarbormasterBuildLog::CHUNK_TABLE,
array(
'id' => 'auto',
'logID' => 'id',
diff --git a/src/applications/harbormaster/storage/build/HarbormasterBuildLog.php b/src/applications/harbormaster/storage/build/HarbormasterBuildLog.php
--- a/src/applications/harbormaster/storage/build/HarbormasterBuildLog.php
+++ b/src/applications/harbormaster/storage/build/HarbormasterBuildLog.php
@@ -11,16 +11,23 @@
protected $live;
private $buildTarget = self::ATTACHABLE;
+ private $rope;
+ private $isOpen;
const CHUNK_BYTE_LIMIT = 102400;
+ const CHUNK_TABLE = 'harbormaster_buildlogchunk';
/**
* The log is encoded as plain text.
*/
const ENCODING_TEXT = 'text';
+ public function __construct() {
+ $this->rope = new PhutilRope();
+ }
+
public function __destruct() {
- if ($this->getLive()) {
+ if ($this->isOpen) {
$this->closeBuildLog();
}
}
@@ -35,17 +42,19 @@
}
public function openBuildLog() {
- if ($this->getLive()) {
+ if ($this->isOpen) {
throw new Exception(pht('This build log is already open!'));
}
+ $this->isOpen = true;
+
return $this
->setLive(1)
->save();
}
public function closeBuildLog() {
- if (!$this->getLive()) {
+ if (!$this->isOpen) {
throw new Exception(pht('This build log is not open!'));
}
@@ -108,63 +117,72 @@
}
$content = (string)$content;
- if (!strlen($content)) {
- return;
- }
- // If the length of the content is greater than the chunk size limit,
- // then we can never fit the content in a single record. We need to
- // split our content out and call append on it for as many parts as there
- // are to the content.
- if (strlen($content) > self::CHUNK_BYTE_LIMIT) {
- $current = $content;
- while (strlen($current) > self::CHUNK_BYTE_LIMIT) {
- $part = substr($current, 0, self::CHUNK_BYTE_LIMIT);
- $current = substr($current, self::CHUNK_BYTE_LIMIT);
- $this->append($part);
+ $this->rope->append($content);
+ $this->flush();
+ }
+
+ private function flush() {
+
+ // TODO: Maybe don't flush more than a couple of times per second. If a
+ // caller writes a single character over and over again, we'll currently
+ // spend a lot of time flushing that.
+
+ $chunk_table = self::CHUNK_TABLE;
+ $chunk_limit = self::CHUNK_BYTE_LIMIT;
+ $rope = $this->rope;
+
+ while (true) {
+ $length = $rope->getByteLength();
+ if (!$length) {
+ break;
}
- $this->append($current);
- return;
- }
- // Retrieve the size of last chunk from the DB for this log. If the
- // chunk is over 500K, then we need to create a new log entry.
- $conn = $this->establishConnection('w');
- $result = queryfx_all(
- $conn,
- 'SELECT id, size, encoding '.
- 'FROM harbormaster_buildlogchunk '.
- 'WHERE logID = %d '.
- 'ORDER BY id DESC '.
- 'LIMIT 1',
- $this->getID());
- if (count($result) === 0 ||
- $result[0]['size'] + strlen($content) > self::CHUNK_BYTE_LIMIT ||
- $result[0]['encoding'] !== self::ENCODING_TEXT) {
-
- // We must insert a new chunk because the data we are appending
- // won't fit into the existing one, or we don't have any existing
- // chunk data.
- queryfx(
- $conn,
- 'INSERT INTO harbormaster_buildlogchunk '.
- '(logID, encoding, size, chunk) '.
- 'VALUES '.
- '(%d, %s, %d, %B)',
- $this->getID(),
- self::ENCODING_TEXT,
- strlen($content),
- $content);
- } else {
- // We have a resulting record that we can append our content onto.
- queryfx(
- $conn,
- 'UPDATE harbormaster_buildlogchunk '.
- 'SET chunk = CONCAT(chunk, %B), size = LENGTH(CONCAT(chunk, %B))'.
- 'WHERE id = %d',
- $content,
- $content,
- $result[0]['id']);
+ $conn_w = $this->establishConnection('w');
+ $tail = queryfx_one(
+ $conn_w,
+ 'SELECT id, size, encoding FROM %T WHERE logID = %d
+ ORDER BY id DESC LIMIT 1',
+ $chunk_table,
+ $this->getID());
+
+ $can_append =
+ ($tail) &&
+ ($tail['encoding'] == self::ENCODING_TEXT) &&
+ ($tail['size'] < $chunk_limit);
+ if ($can_append) {
+ $append_id = $tail['id'];
+ $prefix_size = $tail['size'];
+ } else {
+ $append_id = null;
+ $prefix_size = 0;
+ }
+
+ $data_limit = ($chunk_limit - $prefix_size);
+ $append_data = $rope->getPrefixBytes($data_limit);
+ $data_size = strlen($append_data);
+
+ if ($append_id) {
+ queryfx(
+ $conn_w,
+ 'UPDATE %T SET chunk = CONCAT(chunk, %B), size = %d WHERE id = %d',
+ $chunk_table,
+ $append_data,
+ $prefix_size + $data_size,
+ $append_id);
+ } else {
+ queryfx(
+ $conn_w,
+ 'INSERT INTO %T (logID, encoding, size, chunk)
+ VALUES (%d, %s, %d, %B)',
+ $chunk_table,
+ $this->getID(),
+ self::ENCODING_TEXT,
+ $data_size,
+ $append_data);
+ }
+
+ $rope->removeBytesFromHead(strlen($append_data));
}
}
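
For reference, the loop in the new flush() drains the PhutilRope into rows of at most CHUNK_BYTE_LIMIT (100KB) bytes, topping off the most recent text chunk before inserting a fresh one. Below is a minimal standalone sketch of that splitting behavior, with an in-memory array standing in for the harbormaster_buildlogchunk table and the ENCODING_TEXT check on the tail row omitted; the names ($chunks, append_chunked) are illustrative and not part of Phabricator.

<?php
// Illustrative sketch only: models how flush() splits buffered log data into
// chunks of at most CHUNK_BYTE_LIMIT bytes. An array of strings stands in for
// rows of the chunk table.

const CHUNK_BYTE_LIMIT = 102400;

function append_chunked(array &$chunks, $content) {
  $content = (string)$content;
  while (strlen($content)) {
    $tail = count($chunks) ? (count($chunks) - 1) : null;
    $room = ($tail === null) ? 0 : (CHUNK_BYTE_LIMIT - strlen($chunks[$tail]));
    if ($room > 0) {
      // Corresponds to the UPDATE ... CONCAT(chunk, %B) branch: top off the
      // most recent chunk with as much data as fits.
      $chunks[$tail] .= substr($content, 0, $room);
      $content = (string)substr($content, $room);
    } else {
      // Corresponds to the INSERT branch: start a fresh chunk.
      $chunks[] = substr($content, 0, CHUNK_BYTE_LIMIT);
      $content = (string)substr($content, CHUNK_BYTE_LIMIT);
    }
  }
}

$chunks = array();
append_chunked($chunks, str_repeat('x', 250000));
foreach ($chunks as $i => $chunk) {
  // Expected: 102400, 102400 and 45200 bytes.
  printf("chunk %d: %d bytes\n", $i, strlen($chunk));
}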

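The isOpen flag added above tracks the open/close lifecycle in memory instead of relying on the persisted live column. Assuming $log is a HarbormasterBuildLog obtained through Harbormaster's usual factories (not shown in this diff), a caller interacts with it roughly as follows; only methods visible in the diff are used:

$log->openBuildLog();        // sets live = 1, saves, and marks the log open
$log->append($more_output);  // buffers into the PhutilRope, then flush()es to chunk rows
$log->closeBuildLog();       // explicit close; if omitted, __destruct() closes an open log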