2009-12-30 15:02:46 +09:00
|
|
|
<?php
|
|
|
|
/**
|
|
|
|
* StatusNet, the distributed open-source microblogging tool
|
|
|
|
*
|
|
|
|
 * Data class for user inboxes
|
|
|
|
*
|
|
|
|
* PHP version 5
|
|
|
|
*
|
|
|
|
* LICENCE: This program is free software: you can redistribute it and/or modify
|
|
|
|
* it under the terms of the GNU Affero General Public License as published by
|
|
|
|
* the Free Software Foundation, either version 3 of the License, or
|
|
|
|
* (at your option) any later version.
|
|
|
|
*
|
|
|
|
* This program is distributed in the hope that it will be useful,
|
|
|
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
* GNU Affero General Public License for more details.
|
|
|
|
*
|
|
|
|
* You should have received a copy of the GNU Affero General Public License
|
|
|
|
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|
|
|
*
|
|
|
|
* @category Data
|
|
|
|
* @package StatusNet
|
|
|
|
* @author Evan Prodromou <evan@status.net>
|
|
|
|
* @copyright 2009 StatusNet Inc.
|
|
|
|
* @license http://www.fsf.org/licensing/licenses/agpl-3.0.html GNU Affero General Public License version 3.0
|
|
|
|
* @link http://status.net/
|
|
|
|
*/
|
|
|
|
|
|
|
|
require_once INSTALLDIR.'/classes/Memcached_DataObject.php';
|
|
|
|
|
|
|
|
class Inbox extends Memcached_DataObject
{
    // Batch size used when boxcarring inbox updates through the queue system.
    const BOXCAR = 128;

    // Maximum number of notice IDs kept in the packed inbox blob.
    const MAX_NOTICES = 1024;

    ###START_AUTOCODE
    /* the code below is auto generated do not remove the above tag */

    public $__table = 'inbox';                           // table name
    public $user_id;                         // int(4)  primary_key not_null
    public $notice_ids;                      // blob

    /* Static get */
    function staticGet($k,$v=NULL) { return Memcached_DataObject::staticGet('Inbox',$k,$v); }

    /* the code above is auto generated do not remove the tag below */
    ###END_AUTOCODE

    /**
     * This table has no autoincrement column; tell DB_DataObject not to
     * look for a sequence key on insert.
     *
     * @return array (false, false, false) per DB_DataObject convention
     */
    function sequenceKey()
    {
        return array(false, false, false);
    }

    /**
     * Create a new inbox from existing Notice_inbox stuff and save it
     * to the inbox table.
     *
     * @param int $user_id user whose inbox to build
     *
     * @return Inbox the saved inbox, or null on database error
     */
    static function initialize($user_id)
    {
        $inbox = Inbox::fromNoticeInbox($user_id);

        // fromNoticeInbox() flags its result as fake (unsaved);
        // clear that marker before inserting the real row.
        unset($inbox->fake);

        $result = $inbox->insert();

        if (!$result) {
            common_log_db_error($inbox, 'INSERT', __FILE__);
            return null;
        }

        return $inbox;
    }

    /**
     * Build an unsaved Inbox from the legacy Notice_inbox rows for a user.
     *
     * The newest MAX_NOTICES notice IDs are packed into the blob. The
     * returned object is marked with $inbox->fake = true to indicate it
     * has not been written to the inbox table.
     *
     * @param int $user_id user whose inbox to build
     *
     * @return Inbox unsaved inbox object
     */
    static function fromNoticeInbox($user_id)
    {
        $ids = array();

        $ni = new Notice_inbox();

        $ni->user_id = $user_id;
        $ni->selectAdd();
        $ni->selectAdd('notice_id');
        $ni->orderBy('notice_id DESC');
        $ni->limit(0, self::MAX_NOTICES);

        if ($ni->find()) {
            while ($ni->fetch()) {
                $ids[] = $ni->notice_id;
            }
        }

        $ni->free();
        unset($ni);

        $inbox = new Inbox();

        $inbox->user_id = $user_id;
        $inbox->pack($ids);
        $inbox->fake = true;

        return $inbox;
    }

    /**
     * Append the given notice to the given user's inbox.
     * Caching updates are managed for the inbox itself.
     *
     * If the notice is already in this inbox, the second
     * add will be silently dropped.
     *
     * @param int $user_id
     * @param int $notice_id
     * @return boolean success
     */
    static function insertNotice($user_id, $notice_id)
    {
        // NOTE(review): goes through DB_DataObject rather than the cached
        // staticGet — presumably to avoid acting on a stale cached inbox;
        // the cache entry is blown below after a successful update.
        $inbox = DB_DataObject::staticGet('inbox', 'user_id', $user_id);

        if (empty($inbox)) {
            $inbox = Inbox::initialize($user_id);
        }

        if (empty($inbox)) {
            return false;
        }

        $ids = $inbox->unpack();
        if (in_array(intval($notice_id), $ids)) {
            // Already in there, we probably re-ran some inbox adds
            // due to an error. Skip the dupe silently.
            return true;
        }

        // Prepend the new ID as a 4-byte big-endian value and keep at most
        // MAX_NOTICES entries by truncating the old blob to MAX_NOTICES - 1.
        // sprintf's %08x/%d coerce the arguments to integers, so the query
        // string cannot carry injected SQL.
        $result = $inbox->query(sprintf('UPDATE inbox '.
                                        'set notice_ids = concat(cast(0x%08x as binary(4)), '.
                                        'substr(notice_ids, 1, %d)) '.
                                        'WHERE user_id = %d',
                                        $notice_id,
                                        4 * (self::MAX_NOTICES - 1),
                                        $user_id));

        if ($result) {
            self::blow('inbox:user_id:%d', $user_id);
        }

        return $result;
    }

    /**
     * Add one notice to many users' inboxes.
     *
     * @param int   $notice_id
     * @param array $user_ids  list of user IDs to deliver to
     */
    static function bulkInsert($notice_id, $user_ids)
    {
        foreach ($user_ids as $user_id)
        {
            Inbox::insertNotice($user_id, $notice_id);
        }
    }

    /**
     * Get notice IDs from a user's inbox, newest first, applying
     * since_id/max_id filters before offset/limit slicing.
     *
     * Builds (and caches) an inbox on the fly from Notice_inbox if the
     * user has no inbox row yet.
     *
     * Declared static because streamNotices() calls it as self::stream();
     * PHP instance calls to a static method continue to work.
     *
     * @param int   $user_id
     * @param int   $offset   skip past the most recent N entries
     * @param int   $limit    maximum number of IDs to return
     * @param mixed $since_id return only IDs strictly greater than this
     * @param mixed $max_id   return only IDs up to and including this
     * @param mixed $own      unused here; kept for signature compatibility
     *
     * @return array of integer notice IDs
     */
    static function stream($user_id, $offset, $limit, $since_id, $max_id, $own=false)
    {
        $inbox = Inbox::staticGet('user_id', $user_id);

        if (empty($inbox)) {
            $inbox = Inbox::fromNoticeInbox($user_id);
            if (empty($inbox)) {
                return array();
            } else {
                $inbox->encache();
            }
        }

        $ids = $inbox->unpack();

        if (!empty($since_id)) {
            $newids = array();
            foreach ($ids as $id) {
                if ($id > $since_id) {
                    $newids[] = $id;
                }
            }
            $ids = $newids;
        }

        if (!empty($max_id)) {
            $newids = array();
            foreach ($ids as $id) {
                if ($id <= $max_id) {
                    $newids[] = $id;
                }
            }
            $ids = $newids;
        }

        $ids = array_slice($ids, $offset, $limit);

        return $ids;
    }

    /**
     * Wrapper for Inbox::stream() and Notice::getStreamByIds() returning
     * additional items up to the limit if we were short due to deleted
     * notices still being listed in the inbox.
     *
     * The fast path (when no items are deleted) should be just as fast; the
     * offset parameter is applied *before* lookups for maximum efficiency.
     *
     * This means offset-based paging may show duplicates, but similar behavior
     * already exists when new notices are posted between page views, so we
     * think people will be ok with this until id-based paging is introduced
     * to the user interface.
     *
     * @param int $user_id
     * @param int $offset skip past the most recent N notices (after since_id checks)
     * @param int $limit
     * @param mixed $since_id return only notices after but not including this id
     * @param mixed $max_id return only notices up to and including this id
     * @param mixed $own ignored?
     * @return array of Notice objects
     *
     * @todo consider repacking the inbox when this happens?
     * @fixme reimplement $own if we need it?
     */
    static function streamNotices($user_id, $offset, $limit, $since_id, $max_id, $own=false)
    {
        $ids = self::stream($user_id, $offset, self::MAX_NOTICES, $since_id, $max_id, $own);

        // Do a bulk lookup for the first $limit items
        // Fast path when nothing's deleted.
        $firstChunk = array_slice($ids, 0, $limit);
        $notices = Notice::getStreamByIds($firstChunk);

        $wanted = count($firstChunk); // raw entry count in the inbox up to our $limit
        if ($notices->N >= $wanted) {
            return $notices;
        }

        // There were deleted notices, we'll need to look for more.
        assert($notices instanceof ArrayWrapper);
        $items = $notices->_items;
        $remainder = array_slice($ids, $limit);

        while (count($items) < $wanted && count($remainder) > 0) {
            $notice = Notice::staticGet(array_shift($remainder));
            if ($notice) {
                $items[] = $notice;
            }
            // A false result means a deleted notice still listed in the
            // inbox blob; just skip it and keep scanning the remainder.
        }

        return new ArrayWrapper($items);
    }

    /**
     * Saves a list of integer notice_ids into a packed blob in this object.
     *
     * Each ID is stored as an unsigned 32-bit big-endian value ('N').
     *
     * @param array $ids list of integer notice_ids
     */
    protected function pack(array $ids)
    {
        $this->notice_ids = call_user_func_array('pack', array_merge(array('N*'), $ids));
    }

    /**
     * Unpack the notice_ids blob back into a list of integers.
     *
     * Guards against an empty or unset blob: PHP's unpack() returns false
     * (with a warning) on empty input, which would break callers that
     * iterate or in_array() over the result.
     *
     * @return array of integer notice_ids (1-based keys, per unpack())
     */
    protected function unpack()
    {
        if (empty($this->notice_ids)) {
            return array();
        }
        return unpack('N*', $this->notice_ids);
    }
}
|