Work on rooms/state, database, alias, directory, edus services, event_handler, lazy_loading, metadata, outlier, and pdu_metadata
parent 604b1a5cf1
commit 865e35df17
22 changed files with 1544 additions and 2282 deletions
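The diff below replaces direct key-value tree access in rooms/state (self.roomid_shortstatehash, self.roomid_pduleaves, self.shorteventid_shortstatehash) with calls into sibling services (timeline, short, state_compressor, state_cache) and a per-service Data trait reached through db. A minimal sketch of that shape, assuming only the method names visible in the diff (set_room_state, get_room_shortstatehash, set_event_state) and simplifying RoomId and the error type so the snippet stands alone:

// Sketch only: trait and method names are taken from calls in the diff;
// RoomId is simplified to &str and errors to String so this compiles on its own.
pub type Result<T> = std::result::Result<T, String>;

pub trait Data {
    /// Persist the room's current shortstatehash.
    fn set_room_state(&self, room_id: &str, new_shortstatehash: u64) -> Result<()>;
    /// Look up the room's current shortstatehash, if one is set.
    fn get_room_shortstatehash(&self, room_id: &str) -> Result<Option<u64>>;
    /// Remember which state snapshot an event was created in.
    fn set_event_state(&self, shorteventid: u64, shortstatehash: u64) -> Result<()>;
}

pub struct Service<D: Data> {
    db: D,
}

impl<D: Data> Service<D> {
    /// State logic goes through the Data trait instead of touching
    /// key-value trees such as roomid_shortstatehash directly.
    pub fn force_state(&self, room_id: &str, new_shortstatehash: u64) -> Result<()> {
        self.db.set_room_state(room_id, new_shortstatehash)
    }
}

The split keeps room-state logic in Service while Data hides how the backing key-value store is actually laid out.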
@@ -1,25 +1,30 @@
 mod data;
 pub use data::Data;
 
 use crate::service::*;
 
 pub struct Service<D: Data> {
     db: D,
 }
 
-impl Service {
+impl Service<_> {
     /// Set the room to the given statehash and update caches.
     #[tracing::instrument(skip(self, new_state_ids_compressed, db))]
     pub fn force_state(
         &self,
         room_id: &RoomId,
         shortstatehash: u64,
-        statediffnew :HashSet<CompressedStateEvent>,
-        statediffremoved :HashSet<CompressedStateEvent>,
+        statediffnew: HashSet<CompressedStateEvent>,
+        statediffremoved: HashSet<CompressedStateEvent>,
         db: &Database,
     ) -> Result<()> {
         for event_id in statediffnew.into_iter().filter_map(|new| {
-            self.parse_compressed_state_event(new)
+            state_compressor::parse_compressed_state_event(new)
                 .ok()
                 .map(|(_, id)| id)
         }) {
-            let pdu = match self.get_pdu_json(&event_id)? {
+            let pdu = match timeline::get_pdu_json(&event_id)? {
                 Some(pdu) => pdu,
                 None => continue,
             };
@@ -55,56 +60,12 @@ impl Service {
                 Err(_) => continue,
             };
 
-            self.update_membership(room_id, &user_id, membership, &pdu.sender, None, db, false)?;
+            room::state_cache::update_membership(room_id, &user_id, membership, &pdu.sender, None, db, false)?;
         }
 
-        self.update_joined_count(room_id, db)?;
+        room::state_cache::update_joined_count(room_id, db)?;
 
-        self.roomid_shortstatehash
-            .insert(room_id.as_bytes(), &new_shortstatehash.to_be_bytes())?;
-
-        Ok(())
-    }
-
-    /// Returns the leaf pdus of a room.
-    #[tracing::instrument(skip(self))]
-    pub fn get_pdu_leaves(&self, room_id: &RoomId) -> Result<HashSet<Arc<EventId>>> {
-        let mut prefix = room_id.as_bytes().to_vec();
-        prefix.push(0xff);
-
-        self.roomid_pduleaves
-            .scan_prefix(prefix)
-            .map(|(_, bytes)| {
-                EventId::parse_arc(utils::string_from_bytes(&bytes).map_err(|_| {
-                    Error::bad_database("EventID in roomid_pduleaves is invalid unicode.")
-                })?)
-                .map_err(|_| Error::bad_database("EventId in roomid_pduleaves is invalid."))
-            })
-            .collect()
-    }
-
-    /// Replace the leaves of a room.
-    ///
-    /// The provided `event_ids` become the new leaves, this allows a room to have multiple
-    /// `prev_events`.
-    #[tracing::instrument(skip(self))]
-    pub fn replace_pdu_leaves<'a>(
-        &self,
-        room_id: &RoomId,
-        event_ids: impl IntoIterator<Item = &'a EventId> + Debug,
-    ) -> Result<()> {
-        let mut prefix = room_id.as_bytes().to_vec();
-        prefix.push(0xff);
-
-        for (key, _) in self.roomid_pduleaves.scan_prefix(prefix.clone()) {
-            self.roomid_pduleaves.remove(&key)?;
-        }
-
-        for event_id in event_ids {
-            let mut key = prefix.to_owned();
-            key.extend_from_slice(event_id.as_bytes());
-            self.roomid_pduleaves.insert(&key, event_id.as_bytes())?;
-        }
+        db.set_room_state(room_id, new_shortstatehash);
 
         Ok(())
     }
@@ -121,11 +82,11 @@ impl Service {
         state_ids_compressed: HashSet<CompressedStateEvent>,
         globals: &super::globals::Globals,
     ) -> Result<()> {
-        let shorteventid = self.get_or_create_shorteventid(event_id, globals)?;
+        let shorteventid = short::get_or_create_shorteventid(event_id, globals)?;
 
-        let previous_shortstatehash = self.current_shortstatehash(room_id)?;
+        let previous_shortstatehash = db.get_room_shortstatehash(room_id)?;
 
-        let state_hash = self.calculate_hash(
+        let state_hash = super::calculate_hash(
             &state_ids_compressed
                 .iter()
                 .map(|s| &s[..])
@@ -133,11 +94,11 @@ impl Service {
         );
 
         let (shortstatehash, already_existed) =
-            self.get_or_create_shortstatehash(&state_hash, globals)?;
+            short::get_or_create_shortstatehash(&state_hash, globals)?;
 
         if !already_existed {
             let states_parents = previous_shortstatehash
-                .map_or_else(|| Ok(Vec::new()), |p| self.load_shortstatehash_info(p))?;
+                .map_or_else(|| Ok(Vec::new()), |p| room::state_compressor.load_shortstatehash_info(p))?;
 
             let (statediffnew, statediffremoved) =
                 if let Some(parent_stateinfo) = states_parents.last() {
@@ -156,7 +117,7 @@ impl Service {
                 } else {
                     (state_ids_compressed, HashSet::new())
                 };
-            self.save_state_from_diff(
+            state_compressor::save_state_from_diff(
                 shortstatehash,
                 statediffnew,
                 statediffremoved,
@@ -165,8 +126,7 @@ impl Service {
             )?;
         }
 
-        self.shorteventid_shortstatehash
-            .insert(&shorteventid.to_be_bytes(), &shortstatehash.to_be_bytes())?;
+        db.set_event_state(&shorteventid.to_be_bytes(), &shortstatehash.to_be_bytes())?;
 
         Ok(())
     }
@@ -183,7 +143,7 @@ impl Service {
     ) -> Result<u64> {
         let shorteventid = self.get_or_create_shorteventid(&new_pdu.event_id, globals)?;
 
-        let previous_shortstatehash = self.current_shortstatehash(&new_pdu.room_id)?;
+        let previous_shortstatehash = self.get_room_shortstatehash(&new_pdu.room_id)?;
 
         if let Some(p) = previous_shortstatehash {
             self.shorteventid_shortstatehash
@@ -293,4 +253,8 @@ impl Service {
 
         Ok(())
     }
+
+    pub fn db(&self) -> D {
+        &self.db
+    }
 }