refactor: resolve all clippy warnings

- Fix Arc clone warnings using explicit Arc::clone syntax across 9 files
- Add #[allow(clippy::expect_used)] to test functions for appropriate expect usage
- Remove no-effect statements from debug code cleanup
- Apply clippy auto-fixes for dbg! macro removals and path statements
- Achieve zero clippy warnings on all targets with -D warnings flag
This commit is contained in:
Tunglies
2025-08-17 11:25:10 +08:00
Unverified
parent c62f600477
commit 26a8cede95
8 changed files with 65 additions and 47 deletions

View File

@@ -1460,8 +1460,8 @@ pub async fn check_media_unlock() -> Result<Vec<UnlockItem>, String> {
// 添加哔哩哔哩大陆检测任务
{
let client = client_arc.clone();
let results = results.clone();
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_bilibili_china_mainland(&client).await;
let mut results = results.lock().await;
@@ -1471,8 +1471,8 @@ pub async fn check_media_unlock() -> Result<Vec<UnlockItem>, String> {
// 添加哔哩哔哩港澳台检测任务
{
let client = client_arc.clone();
let results = results.clone();
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_bilibili_hk_mc_tw(&client).await;
let mut results = results.lock().await;
@@ -1482,8 +1482,8 @@ pub async fn check_media_unlock() -> Result<Vec<UnlockItem>, String> {
// 添加合并的ChatGPT检测任务
{
let client = client_arc.clone();
let results = results.clone();
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let chatgpt_results = check_chatgpt_combined(&client).await;
let mut results = results.lock().await;
@@ -1493,8 +1493,8 @@ pub async fn check_media_unlock() -> Result<Vec<UnlockItem>, String> {
// 添加Gemini检测任务
{
let client = client_arc.clone();
let results = results.clone();
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_gemini(&client).await;
let mut results = results.lock().await;
@@ -1504,8 +1504,8 @@ pub async fn check_media_unlock() -> Result<Vec<UnlockItem>, String> {
// 添加YouTube Premium检测任务
{
let client = client_arc.clone();
let results = results.clone();
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_youtube_premium(&client).await;
let mut results = results.lock().await;
@@ -1515,8 +1515,8 @@ pub async fn check_media_unlock() -> Result<Vec<UnlockItem>, String> {
// 添加动画疯检测任务
{
let client = client_arc.clone();
let results = results.clone();
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_bahamut_anime(&client).await;
let mut results = results.lock().await;
@@ -1526,8 +1526,8 @@ pub async fn check_media_unlock() -> Result<Vec<UnlockItem>, String> {
// 添加 Netflix 检测任务
{
let client = client_arc.clone();
let results = results.clone();
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_netflix(&client).await;
let mut results = results.lock().await;
@@ -1537,8 +1537,8 @@ pub async fn check_media_unlock() -> Result<Vec<UnlockItem>, String> {
// 添加 Disney+ 检测任务
{
let client = client_arc.clone();
let results = results.clone();
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_disney_plus(&client).await;
let mut results = results.lock().await;
@@ -1548,8 +1548,8 @@ pub async fn check_media_unlock() -> Result<Vec<UnlockItem>, String> {
// 添加 Prime Video 检测任务
{
let client = client_arc.clone();
let results = results.clone();
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_prime_video(&client).await;
let mut results = results.lock().await;

View File

@@ -174,33 +174,33 @@ mod tests {
use std::mem;
#[test]
#[allow(unused_variables)]
#[allow(clippy::expect_used)]
fn test_prfitem_from_merge_size() {
let merge_item = PrfItem::from_merge(Some("Merge".to_string())).unwrap();
dbg!(&merge_item);
let merge_item = PrfItem::from_merge(Some("Merge".to_string()))
.expect("Failed to create merge item in test");
let prfitem_size = mem::size_of_val(&merge_item);
dbg!(prfitem_size);
// Boxed version
let boxed_merge_item = Box::new(merge_item);
let box_prfitem_size = mem::size_of_val(&boxed_merge_item);
dbg!(box_prfitem_size);
// The size of Box<T> is always pointer-sized (usually 8 bytes on 64-bit)
// assert_eq!(box_prfitem_size, mem::size_of::<Box<PrfItem>>());
assert!(box_prfitem_size < prfitem_size);
}
#[test]
#[allow(unused_variables)]
fn test_draft_size_non_boxed() {
let draft = Draft::from(IRuntime::new());
let iruntime_size = std::mem::size_of_val(&draft);
dbg!(iruntime_size);
assert_eq!(iruntime_size, std::mem::size_of::<Draft<IRuntime>>());
}
#[test]
#[allow(unused_variables)]
fn test_draft_size_boxed() {
let draft = Draft::from(Box::new(IRuntime::new()));
let box_iruntime_size = std::mem::size_of_val(&draft);
dbg!(box_iruntime_size);
assert_eq!(
box_iruntime_size,
std::mem::size_of::<Draft<Box<IRuntime>>>()

View File

@@ -97,7 +97,7 @@ impl EventDrivenProxyManager {
let (event_tx, event_rx) = mpsc::unbounded_channel();
let (query_tx, query_rx) = mpsc::unbounded_channel();
Self::start_event_loop(state.clone(), event_rx, query_rx);
Self::start_event_loop(Arc::clone(&state), event_rx, query_rx);
Self {
state,

View File

@@ -13,7 +13,7 @@ pub fn use_script(
let outputs = Rc::new(RefCell::new(vec![]));
let copy_outputs = outputs.clone();
let copy_outputs = Rc::clone(&outputs);
unsafe {
let _ = context.register_global_builtin_callable(
"__verge_log__".into(),
@@ -125,6 +125,8 @@ fn escape_js_string_for_single_quote(s: &str) -> String {
}
#[test]
#[allow(unused_variables)]
#[allow(clippy::expect_used)]
fn test_script() {
let script = r#"
function main(config) {
@@ -153,15 +155,13 @@ fn test_script() {
let _ = serde_yaml::to_string(&config).expect("Failed to serialize config to YAML");
let yaml_config_size = std::mem::size_of_val(&config);
dbg!(yaml_config_size);
let box_yaml_config_size = std::mem::size_of_val(&Box::new(config));
dbg!(box_yaml_config_size);
dbg!(results);
assert!(box_yaml_config_size < yaml_config_size);
}
// 测试特殊字符转义功能
#[test]
#[allow(clippy::expect_used)]
fn test_escape_unescape() {
let test_string = r#"Hello "World"!\nThis is a test with \u00A9 copyright symbol."#;
let escaped = escape_js_string_for_single_quote(test_string);

View File

@@ -89,6 +89,8 @@ mod tests {
use serde_yaml::Value;
#[test]
#[allow(clippy::unwrap_used)]
#[allow(clippy::expect_used)]
fn test_delete_proxy_and_references() {
let config_str = r#"
proxies:
@@ -107,7 +109,8 @@ proxy-groups:
proxies:
- "proxy1"
"#;
let mut config: Mapping = serde_yaml::from_str(config_str).unwrap();
let mut config: Mapping =
serde_yaml::from_str(config_str).expect("Failed to parse test config YAML");
let seq = SeqMap {
prepend: Sequence::new(),
@@ -118,38 +121,51 @@ proxy-groups:
config = use_seq(seq, config, "proxies");
// Check if proxy1 is removed from proxies
let proxies = config.get("proxies").unwrap().as_sequence().unwrap();
let proxies = config
.get("proxies")
.expect("proxies field should exist")
.as_sequence()
.expect("proxies should be a sequence");
assert_eq!(proxies.len(), 1);
assert_eq!(
proxies[0]
.as_mapping()
.unwrap()
.expect("proxy should be a mapping")
.get("name")
.unwrap()
.expect("proxy should have name")
.as_str()
.unwrap(),
.expect("name should be string"),
"proxy2"
);
// Check if proxy1 is removed from all groups
let groups = config.get("proxy-groups").unwrap().as_sequence().unwrap();
let groups = config
.get("proxy-groups")
.expect("proxy-groups field should exist")
.as_sequence()
.expect("proxy-groups should be a sequence");
let group1_proxies = groups[0]
.as_mapping()
.unwrap()
.expect("group should be a mapping")
.get("proxies")
.unwrap()
.expect("group should have proxies")
.as_sequence()
.unwrap();
.expect("group proxies should be a sequence");
let group2_proxies = groups[1]
.as_mapping()
.unwrap()
.expect("group should be a mapping")
.get("proxies")
.unwrap()
.expect("group should have proxies")
.as_sequence()
.unwrap();
.expect("group proxies should be a sequence");
assert_eq!(group1_proxies.len(), 1);
assert_eq!(group1_proxies[0].as_str().unwrap(), "proxy2");
assert_eq!(
group1_proxies[0]
.as_str()
.expect("proxy name should be string"),
"proxy2"
);
assert_eq!(group2_proxies.len(), 0);
}
}

View File

@@ -152,7 +152,7 @@ impl LogsMonitor {
*current_level = Some(filter_level.clone());
}
let monitor_current = self.current.clone();
let monitor_current = Arc::clone(&self.current);
let task = tokio::spawn(async move {
loop {
@@ -183,7 +183,9 @@ impl LogsMonitor {
let _ = client
.get(&url)
.timeout(Duration::from_secs(30))
.process_lines(|line| Self::process_log_line(line, monitor_current.clone()))
.process_lines(|line| {
Self::process_log_line(line, Arc::clone(&monitor_current))
})
.await;
// Wait before retrying

View File

@@ -51,7 +51,7 @@ where
freshness_duration: Duration,
) -> Self {
let current = Arc::new(RwLock::new(T::default()));
let monitor_current = current.clone();
let monitor_current = Arc::clone(&current);
let endpoint_clone = endpoint.clone();
// Start the monitoring task
@@ -110,7 +110,7 @@ where
let _ = client
.get(&endpoint)
.timeout(timeout)
.process_lines(|line| T::parse_and_update(line, current.clone()))
.process_lines(|line| T::parse_and_update(line, Arc::clone(&current)))
.await;
tokio::time::sleep(retry_interval).await;

View File

@@ -49,7 +49,7 @@ impl ProxyRequestCache {
self.map
.remove_if(&key_cloned, |_, v| Arc::ptr_eq(v, &cell));
let new_cell = Arc::new(OnceCell::new());
self.map.insert(key_cloned.clone(), new_cell.clone());
self.map.insert(key_cloned.clone(), Arc::clone(&new_cell));
return Box::pin(self.get_or_fetch(key_cloned, ttl, fetch_fn)).await;
}
}