cargo clippy

Nyaaori, 2022-10-10 14:09:11 +02:00
commit f430b87459 (parent ca82b2940d)
GPG key ID: E7819C3ED4D1F82E (no known key found for this signature in database)
32 changed files with 139 additions and 166 deletions

@@ -193,7 +193,7 @@ impl KvTree for RocksDbEngineTree<'_> {
     fn increment(&self, key: &[u8]) -> Result<Vec<u8>> {
         let lock = self.write_lock.write().unwrap();
-        let old = self.db.rocks.get_cf(&self.cf(), &key)?;
+        let old = self.db.rocks.get_cf(&self.cf(), key)?;
         let new = utils::increment(old.as_deref()).unwrap();
         self.db.rocks.put_cf(&self.cf(), key, &new)?;
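
The change above is the usual needless-borrow cleanup: `key` is already a `&[u8]`, so taking another reference with `&key` before passing it on buys nothing. A minimal, self-contained sketch of the same shape, with a hypothetical `Store::put` standing in for the real RocksDB call:

struct Store;

impl Store {
    fn put(&self, key: impl AsRef<[u8]>, value: impl AsRef<[u8]>) {
        // A real store would persist the pair; here we only exercise the bounds.
        let _ = (key.as_ref(), value.as_ref());
    }
}

fn insert_count(store: &Store, key: &[u8]) {
    // `store.put(&key, ...)` compiles too (because `&&[u8]: AsRef<[u8]>`),
    // but that extra borrow is exactly what clippy::needless_borrow warns about.
    store.put(key, 1u64.to_be_bytes());
}

fn main() {
    insert_count(&Store, b"example-key");
}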

@@ -48,13 +48,13 @@ pub struct Engine
 impl Engine {
     fn prepare_conn(path: &Path, cache_size_kb: u32) -> Result<Connection> {
-        let conn = Connection::open(&path)?;
+        let conn = Connection::open(path)?;

-        conn.pragma_update(Some(Main), "page_size", &2048)?;
-        conn.pragma_update(Some(Main), "journal_mode", &"WAL")?;
-        conn.pragma_update(Some(Main), "synchronous", &"NORMAL")?;
-        conn.pragma_update(Some(Main), "cache_size", &(-i64::from(cache_size_kb)))?;
-        conn.pragma_update(Some(Main), "wal_autocheckpoint", &0)?;
+        conn.pragma_update(Some(Main), "page_size", 2048)?;
+        conn.pragma_update(Some(Main), "journal_mode", "WAL")?;
+        conn.pragma_update(Some(Main), "synchronous", "NORMAL")?;
+        conn.pragma_update(Some(Main), "cache_size", -i64::from(cache_size_kb))?;
+        conn.pragma_update(Some(Main), "wal_autocheckpoint", 0)?;

         Ok(conn)
     }
@@ -75,7 +75,7 @@ impl Engine
     pub fn flush_wal(self: &Arc<Self>) -> Result<()> {
         self.write_lock()
-            .pragma_update(Some(Main), "wal_checkpoint", &"RESTART")?;
+            .pragma_update(Some(Main), "wal_checkpoint", "RESTART")?;
         Ok(())
     }
 }
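
Same idea in the SQLite engine: in the rusqlite version this code appears to build against, `pragma_update` accepts the pragma value generically (anything implementing `ToSql`), so `&2048`, `&"WAL"` and friends only satisfy the bound through an extra borrow. A minimal sketch under that assumption; the in-memory connection and the particular pragmas are illustrative only:

use rusqlite::{Connection, DatabaseName::Main, Result};

fn tuned_conn() -> Result<Connection> {
    let conn = Connection::open_in_memory()?;
    // Values are passed by value; `&2048` would also compile, which is why clippy
    // reports the borrow as needless rather than as an error.
    conn.pragma_update(Some(Main), "page_size", 2048)?;
    conn.pragma_update(Some(Main), "cache_size", -64000)?;
    Ok(conn)
}

fn main() -> Result<()> {
    let _conn = tuned_conn()?;
    Ok(())
}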

@@ -134,7 +134,7 @@ impl service::globals::Data for KeyValueDatabase {
         let mut parts = keypair_bytes.splitn(2, |&b| b == 0xff);
-        let keypair = utils::string_from_bytes(
+        utils::string_from_bytes(
             // 1. version
             parts
                 .next()
@@ -151,9 +151,7 @@ impl service::globals::Data for KeyValueDatabase {
         .and_then(|(version, key)| {
             Ed25519KeyPair::from_der(key, version)
                 .map_err(|_| Error::bad_database("Private or public keys are invalid."))
-        });
-        keypair
+        })
     }

     fn remove_keypair(&self) -> Result<()> {
         self.global.remove(b"keypair")
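
The second hunk is clippy's let_and_return fix: binding the whole `and_then` chain to `keypair` only to return it on the next line adds nothing, so the binding goes away and the chain becomes the tail expression. The same shape in isolation, using a made-up helper rather than Conduit code:

fn version_string(bytes: &[u8]) -> Result<String, std::str::Utf8Error> {
    // Before: `let version = std::str::from_utf8(bytes).map(str::to_owned); version`
    // After: the expression is simply the tail of the function, which is what
    // clippy::let_and_return asks for.
    std::str::from_utf8(bytes).map(str::to_owned)
}

fn main() {
    println!("{:?}", version_string(b"ed25519:1"));
}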

@@ -40,7 +40,7 @@ impl service::pusher::Data for KeyValueDatabase {
         self.senderkey_pusher
             .get(&senderkey)?
             .map(|push| {
-                serde_json::from_slice(&*push)
+                serde_json::from_slice(&push)
                     .map_err(|_| Error::bad_database("Invalid Pusher in db."))
             })
             .transpose()
@@ -53,7 +53,7 @@ impl service::pusher::Data for KeyValueDatabase {
         self.senderkey_pusher
             .scan_prefix(prefix)
             .map(|(_, push)| {
-                serde_json::from_slice(&*push)
+                serde_json::from_slice(&push)
                     .map_err(|_| Error::bad_database("Invalid Pusher in db."))
             })
             .collect()
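
In both hunks `push` is an owned byte buffer, so `&*push` dereferences it to `[u8]` and re-borrows, while the plain `&push` already deref-coerces to the `&[u8]` that `serde_json::from_slice` expects. A small sketch of that coercion, assuming serde and serde_json are available; the `PusherLike` type is made up for illustration:

use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct PusherLike {
    pushkey: String,
}

fn parse(raw: Vec<u8>) -> serde_json::Result<PusherLike> {
    // `from_slice` takes `&[u8]`; `&raw` (a `&Vec<u8>`) coerces to it, so the
    // explicit `&*raw` re-borrow adds nothing.
    serde_json::from_slice(&raw)
}

fn main() {
    println!("{:?}", parse(br#"{"pushkey":"abc"}"#.to_vec()));
}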

@@ -9,7 +9,7 @@ impl service::rooms::alias::Data for KeyValueDatabase {
         let mut aliasid = room_id.as_bytes().to_vec();
         aliasid.push(0xff);
         aliasid.extend_from_slice(&services().globals.next_count()?.to_be_bytes());
-        self.aliasid_alias.insert(&aliasid, &*alias.as_bytes())?;
+        self.aliasid_alias.insert(&aliasid, alias.as_bytes())?;
         Ok(())
}

@@ -88,7 +88,7 @@ impl service::rooms::edus::presence::Data for KeyValueDatabase {
         for (key, value) in self
             .presenceid_presence
-            .iter_from(&*first_possible_edu, false)
+            .iter_from(&first_possible_edu, false)
             .take_while(|(key, _)| key.starts_with(&prefix))
         {
             let user_id = UserId::parse(

@@ -17,7 +17,7 @@ impl service::rooms::edus::typing::Data for KeyValueDatabase {
         room_typing_id.extend_from_slice(&count);
         self.typingid_userid
-            .insert(&room_typing_id, &*user_id.as_bytes())?;
+            .insert(&room_typing_id, user_id.as_bytes())?;
         self.roomid_lasttypingupdate
             .insert(room_id.as_bytes(), &count)?;

@@ -15,7 +15,7 @@ impl service::rooms::search::Data for KeyValueDatabase {
             let mut key = shortroomid.to_be_bytes().to_vec();
             key.extend_from_slice(word.as_bytes());
             key.push(0xff);
-            key.extend_from_slice(&pdu_id);
+            key.extend_from_slice(pdu_id);
             (key, Vec::new())
         });

@@ -39,7 +39,7 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
         {
             hash_map::Entry::Vacant(v) => {
                 if let Some(last_count) = self
-                    .pdus_until(&sender_user, &room_id, u64::MAX)?
+                    .pdus_until(sender_user, room_id, u64::MAX)?
                     .filter_map(|r| {
                         // Filter out buggy events
                         if r.is_err() {
@@ -205,8 +205,7 @@ impl service::rooms::timeline::Data for KeyValueDatabase {
             .unwrap()
             .insert(pdu.room_id.clone(), count);
-        self.eventid_pduid
-            .insert(pdu.event_id.as_bytes(), &pdu_id)?;
+        self.eventid_pduid.insert(pdu.event_id.as_bytes(), pdu_id)?;
         self.eventid_outlierpdu.remove(pdu.event_id.as_bytes())?;
         Ok(())

@@ -114,7 +114,7 @@ impl service::rooms::user::Data for KeyValueDatabase {
         utils::common_elements(iterators, Ord::cmp)
             .expect("users is not empty")
             .map(|bytes| {
-                RoomId::parse(utils::string_from_bytes(&*bytes).map_err(|_| {
+                RoomId::parse(utils::string_from_bytes(&bytes).map_err(|_| {
                     Error::bad_database("Invalid RoomId bytes in userroomid_joined")
                 })?)
                 .map_err(|_| Error::bad_database("Invalid RoomId in userroomid_joined."))

@@ -38,7 +38,7 @@ impl service::sending::Data for KeyValueDatabase {
     fn delete_all_active_requests_for(&self, outgoing_kind: &OutgoingKind) -> Result<()> {
         let prefix = outgoing_kind.get_prefix();
-        for (key, _) in self.servercurrentevent_data.scan_prefix(prefix.clone()) {
+        for (key, _) in self.servercurrentevent_data.scan_prefix(prefix) {
             self.servercurrentevent_data.remove(&key)?;
         }
@@ -51,7 +51,7 @@ impl service::sending::Data for KeyValueDatabase {
             self.servercurrentevent_data.remove(&key).unwrap();
         }
-        for (key, _) in self.servernameevent_data.scan_prefix(prefix.clone()) {
+        for (key, _) in self.servernameevent_data.scan_prefix(prefix) {
             self.servernameevent_data.remove(&key).unwrap();
         }
@@ -67,7 +67,7 @@ impl service::sending::Data for KeyValueDatabase {
         for (outgoing_kind, event) in requests {
             let mut key = outgoing_kind.get_prefix();
             key.extend_from_slice(if let SendingEventType::Pdu(value) = &event {
-                &**value
+                value
             } else {
                 &[]
             });
@@ -91,7 +91,7 @@ impl service::sending::Data for KeyValueDatabase {
         let prefix = outgoing_kind.get_prefix();
         return Box::new(
             self.servernameevent_data
-                .scan_prefix(prefix.clone())
+                .scan_prefix(prefix)
                 .map(|(k, v)| parse_servercurrentevent(&k, v).map(|(_, ev)| (ev, k))),
         );
     }
@@ -155,7 +155,7 @@ fn parse_servercurrentevent(
         let mut parts = key[1..].splitn(3, |&b| b == 0xff);
         let user = parts.next().expect("splitn always returns one element");
-        let user_string = utils::string_from_bytes(&user)
+        let user_string = utils::string_from_bytes(user)
            .map_err(|_| Error::bad_database("Invalid user string in servercurrentevent"))?;
         let user_id = UserId::parse(user_string)
            .map_err(|_| Error::bad_database("Invalid user id in servercurrentevent"))?;
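
Several of these hunks also drop a `prefix.clone()` where `prefix` is never used again afterwards, so the clone only produced a copy that immediately went dead; that is the pattern clippy reports as redundant_clone. A stripped-down version, with a `scan_prefix` stand-in that simply takes its argument by value:

fn scan_prefix(prefix: Vec<u8>) -> usize {
    // Stand-in for an iterator constructor that consumes the prefix buffer.
    prefix.len()
}

fn delete_all(server: &str) -> usize {
    let mut prefix = server.as_bytes().to_vec();
    prefix.push(0xff);
    // `prefix` is not touched after this call, so `scan_prefix(prefix.clone())`
    // would copy the buffer for nothing; handing over ownership is enough.
    scan_prefix(prefix)
}

fn main() {
    println!("{}", delete_all("example.org"));
}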

@@ -5,10 +5,9 @@ use ruma::{
     encryption::{CrossSigningKey, DeviceKeys, OneTimeKey},
     events::{AnyToDeviceEvent, StateEventType},
     serde::Raw,
-    DeviceId, DeviceKeyAlgorithm, DeviceKeyId, MilliSecondsSinceUnixEpoch, OwnedUserId, UInt,
-    UserId,
+    DeviceId, DeviceKeyAlgorithm, DeviceKeyId, MilliSecondsSinceUnixEpoch, OwnedDeviceId,
+    OwnedDeviceKeyId, OwnedMxcUri, OwnedUserId, UInt, UserId,
 };
-use ruma::{OwnedDeviceId, OwnedDeviceKeyId, OwnedMxcUri};
 use tracing::warn;
 use crate::{
@@ -380,13 +379,12 @@ impl service::users::Data for KeyValueDatabase {
                 Ok((
                     serde_json::from_slice(
-                        &*key
-                            .rsplit(|&b| b == 0xff)
+                        key.rsplit(|&b| b == 0xff)
                             .next()
                             .ok_or_else(|| Error::bad_database("OneTimeKeyId in db is invalid."))?,
                     )
                     .map_err(|_| Error::bad_database("OneTimeKeyId in db is invalid."))?,
-                    serde_json::from_slice(&*value)
+                    serde_json::from_slice(&value)
                         .map_err(|_| Error::bad_database("OneTimeKeys in db are invalid."))?,
                 ))
             })
@@ -410,7 +408,7 @@ impl service::users::Data for KeyValueDatabase {
             .map(|(bytes, _)| {
                 Ok::<_, Error>(
                     serde_json::from_slice::<OwnedDeviceKeyId>(
-                        &*bytes.rsplit(|&b| b == 0xff).next().ok_or_else(|| {
+                        bytes.rsplit(|&b| b == 0xff).next().ok_or_else(|| {
                             Error::bad_database("OneTimeKey ID in db is invalid.")
                         })?,
                     )

@@ -2,22 +2,17 @@ pub mod abstraction;
 pub mod key_value;
 use crate::{services, utils, Config, Error, PduEvent, Result, Services, SERVICES};
-use abstraction::KeyValueDatabaseEngine;
-use abstraction::KvTree;
+use abstraction::{KeyValueDatabaseEngine, KvTree};
 use directories::ProjectDirs;
 use lru_cache::LruCache;
-use ruma::CanonicalJsonValue;
-use ruma::OwnedDeviceId;
-use ruma::OwnedEventId;
-use ruma::OwnedRoomId;
-use ruma::OwnedUserId;
 use ruma::{
     events::{
         push_rules::PushRulesEventContent, room::message::RoomMessageEventContent,
         GlobalAccountDataEvent, GlobalAccountDataEventType, StateEventType,
     },
     push::Ruleset,
-    EventId, RoomId, UserId,
+    CanonicalJsonValue, EventId, OwnedDeviceId, OwnedEventId, OwnedRoomId, OwnedUserId, RoomId,
+    UserId,
 };
 use std::{
     collections::{BTreeMap, HashMap, HashSet},