1use std::collections::hash_map::DefaultHasher;
3use std::collections::{HashMap, HashSet};
4use std::hash::{BuildHasher, Hash, Hasher};
5use std::path::PathBuf;
6use std::sync::{LazyLock, Mutex};
7use std::time::Instant;
8
9use futures::stream::{self, StreamExt};
10use serde_json::Value;
11use tracing::{debug, info, warn};
12
13use crate::state::types::NewsFeedItem;
14
15use super::Result;
16use super::cache::{AUR_COMMENTS_CACHE, SKIP_CACHE_TTL_SECONDS, UPDATES_CACHE};
17use super::helpers::{
18 build_aur_update_item, build_official_update_item, fetch_official_package_date,
19 normalize_pkg_date, update_seen_for_comments,
20};
21use super::rate_limit::rate_limit;
22
#[derive(Debug, Clone)]
/// Outcome of fetching an official package's date (see
/// `fetch_official_package_date` in `helpers`).
pub(super) enum FetchDateResult {
    /// Fresh fetch succeeded; payload is the date string, if one was present.
    Success(Option<String>),
    /// Live fetch did not succeed but a cached value was usable as a fallback.
    CachedFallback(Option<String>),
    /// No usable data; the caller queues the package for a background retry.
    NeedsRetry,
}
42
/// Human-readable change summaries for AUR packages, keyed by package name.
/// Populated by `fetch_aur_versions` when the fresh RPC response differs from
/// the on-disk cache; read back via `get_aur_json_changes`.
static AUR_JSON_CHANGES_CACHE: LazyLock<Mutex<HashMap<String, String>>> =
    LazyLock::new(|| Mutex::new(HashMap::new()));
47
/// Change summaries for official packages, keyed by package name.
/// Read back via `get_official_json_changes`; populated elsewhere in this
/// module tree (hence `pub(super)`), presumably where official JSON is
/// compared via `compare_official_json_changes` — confirm at the call sites.
pub(super) static OFFICIAL_JSON_CHANGES_CACHE: LazyLock<Mutex<HashMap<String, String>>> =
    LazyLock::new(|| Mutex::new(HashMap::new()));
52
#[derive(Debug, Clone)]
/// Per-package data extracted from the AUR RPC `info` response.
pub(super) struct AurVersionInfo {
    /// Package name (`Name` field of the RPC result object).
    pub name: String,
    /// Current version string (`Version` field; empty when absent).
    pub version: String,
    /// Unix timestamp from the `LastModified` field, when present.
    pub last_modified: Option<i64>,
}
74
#[derive(Clone)]
/// An official package selected for emission, pending its date fetch.
struct OfficialCandidate {
    /// Position in the original scan order; used to re-sort results after
    /// unordered concurrent fetches.
    order: usize,
    /// The matching entry from the official package index.
    pkg: crate::state::PackageItem,
    /// Version previously recorded in the seen-versions baseline, if any.
    last_seen: Option<String>,
    /// Old version from the explicit updates mapping, when one was supplied.
    old_version: Option<String>,
    /// Version that the feed item should report as current.
    remote_version: String,
}
89
90fn process_official_packages<HV>(
107 installed_sorted: &[String],
108 seen_pkg_versions: &mut HashMap<String, String, HV>,
109 updates_versions: Option<&HashMap<String, (String, String)>>,
110 force_emit_all: bool,
111 mut remaining: usize,
112) -> (
113 Vec<OfficialCandidate>,
114 Vec<String>,
115 usize,
116 usize,
117 usize,
118 usize,
119)
120where
121 HV: BuildHasher,
122{
123 let mut aur_candidates: Vec<String> = Vec::new();
124 let mut official_candidates: Vec<OfficialCandidate> = Vec::new();
125 let mut baseline_only = 0usize;
126 let mut new_packages = 0usize;
127 let mut updated_packages = 0usize;
128
129 for name in installed_sorted {
130 if let Some(pkg) = crate::index::find_package_by_name(name) {
131 let (old_version_opt, remote_version) = updates_versions
132 .and_then(|m| m.get(&pkg.name))
133 .map_or((None, pkg.version.as_str()), |(old_v, new_v)| {
134 (Some(old_v.as_str()), new_v.as_str())
135 });
136 let remote_version = remote_version.to_string();
137 let last_seen = seen_pkg_versions.insert(pkg.name.clone(), remote_version.clone());
138 let is_new_package = last_seen.is_none();
139 let has_version_change = last_seen.as_ref() != Some(&remote_version);
140 let should_emit = remaining > 0 && (force_emit_all || has_version_change);
143 if should_emit {
144 if is_new_package {
145 new_packages = new_packages.saturating_add(1);
146 } else if has_version_change {
147 updated_packages = updated_packages.saturating_add(1);
148 }
149 let order = official_candidates.len();
150 official_candidates.push(OfficialCandidate {
151 order,
152 pkg: pkg.clone(),
153 last_seen,
154 old_version: old_version_opt.map(str::to_string),
155 remote_version,
156 });
157 remaining = remaining.saturating_sub(1);
158 } else {
159 baseline_only = baseline_only.saturating_add(1);
160 }
161 } else {
162 aur_candidates.push(name.clone());
163 }
164 }
165
166 (
167 official_candidates,
168 aur_candidates,
169 new_packages,
170 updated_packages,
171 baseline_only,
172 remaining,
173 )
174}
175
176fn process_aur_packages<HV>(
193 aur_info: Vec<AurVersionInfo>,
194 seen_pkg_versions: &mut HashMap<String, String, HV>,
195 updates_versions: Option<&HashMap<String, (String, String)>>,
196 force_emit_all: bool,
197 mut remaining: usize,
198) -> (Vec<NewsFeedItem>, usize, usize, usize, usize)
199where
200 HV: BuildHasher,
201{
202 let mut items = Vec::new();
203 let mut aur_new_packages = 0usize;
204 let mut aur_updated_packages = 0usize;
205 let mut baseline_only = 0usize;
206
207 for pkg in aur_info {
208 if remaining == 0 {
209 break;
210 }
211 let (old_version_opt, remote_version) = updates_versions
212 .and_then(|m| m.get(&pkg.name))
213 .map_or((None, pkg.version.as_str()), |(old_v, new_v)| {
214 (Some(old_v.as_str()), new_v.as_str())
215 });
216 let remote_version = remote_version.to_string();
217 let last_seen = seen_pkg_versions.insert(pkg.name.clone(), remote_version.clone());
218 let is_new_package = last_seen.is_none();
219 let has_version_change = last_seen.as_ref() != Some(&remote_version);
220 let should_emit = remaining > 0 && (force_emit_all || has_version_change);
223 if should_emit {
224 if is_new_package {
225 aur_new_packages = aur_new_packages.saturating_add(1);
226 } else if has_version_change {
227 aur_updated_packages = aur_updated_packages.saturating_add(1);
228 }
229 items.push(build_aur_update_item(
230 &pkg,
231 last_seen.as_ref(),
232 old_version_opt,
233 &remote_version,
234 ));
235 remaining = remaining.saturating_sub(1);
236 } else {
237 baseline_only = baseline_only.saturating_add(1);
238 }
239 }
240
241 (
242 items,
243 aur_new_packages,
244 aur_updated_packages,
245 baseline_only,
246 remaining,
247 )
248}
249
/// Maximum number of background retry attempts per package.
const MAX_RETRIES_PER_PACKAGE: u8 = 3;

/// Base delay before the first background retry attempt.
const RETRY_BASE_DELAY_MS: u64 = 10_000;

/// Exponential backoff multiplier applied for each additional attempt.
const RETRY_DELAY_MULTIPLIER: u64 = 2;
258
#[derive(Clone)]
/// A package queued for background re-fetch of its archlinux.org JSON.
struct BackgroundRetryCandidate {
    /// Package to retry fetching.
    pkg_name: String,
    /// Lowercased repository component of the JSON URL.
    repo_slug: String,
    /// Architecture component of the URL (host arch when the package's own
    /// arch string was empty).
    arch_slug: String,
    /// Attempts made so far; incremented before each attempt.
    retry_count: u8,
}
271
272async fn fetch_official_dates_with_retry(
287 candidates: Vec<OfficialCandidate>,
288) -> Vec<(usize, NewsFeedItem)> {
289 let mut retry_queue: Vec<BackgroundRetryCandidate> = Vec::new();
290 let mut official_items: Vec<(usize, NewsFeedItem)> = Vec::new();
291
292 let fetch_results: Vec<(OfficialCandidate, FetchDateResult)> = stream::iter(candidates)
294 .map(|candidate| async move {
295 let result = fetch_official_package_date(&candidate.pkg).await;
296 (candidate, result)
297 })
298 .buffer_unordered(5)
299 .collect::<Vec<_>>()
300 .await;
301
302 for (candidate, result) in fetch_results {
303 match result {
304 FetchDateResult::Success(date) | FetchDateResult::CachedFallback(date) => {
305 let item = build_official_update_item(
306 &candidate.pkg,
307 candidate.last_seen.as_ref(),
308 candidate.old_version.as_deref(),
309 &candidate.remote_version,
310 date,
311 );
312 official_items.push((candidate.order, item));
313 }
314 FetchDateResult::NeedsRetry => {
315 debug!(
317 package = %candidate.pkg.name,
318 "package needs retry, using today's date and queuing for background retry"
319 );
320 let item = build_official_update_item(
321 &candidate.pkg,
322 candidate.last_seen.as_ref(),
323 candidate.old_version.as_deref(),
324 &candidate.remote_version,
325 None, );
327 official_items.push((candidate.order, item));
328
329 if let crate::state::Source::Official { repo, arch } = &candidate.pkg.source {
331 let repo_slug = repo.to_lowercase();
332 let arch_slug = if arch.is_empty() {
333 std::env::consts::ARCH.to_string()
334 } else {
335 arch.clone()
336 };
337 retry_queue.push(BackgroundRetryCandidate {
338 pkg_name: candidate.pkg.name.clone(),
339 repo_slug,
340 arch_slug,
341 retry_count: 0,
342 });
343 }
344 }
345 }
346 }
347
348 if !retry_queue.is_empty() {
350 info!(
351 "spawning background retry task for {} packages",
352 retry_queue.len()
353 );
354 tokio::spawn(process_retry_queue_background(retry_queue));
355 }
356
357 official_items
358}
359
360async fn process_retry_queue_background(initial_queue: Vec<BackgroundRetryCandidate>) {
374 use std::collections::VecDeque;
375
376 let mut retry_queue: VecDeque<BackgroundRetryCandidate> = initial_queue.into_iter().collect();
377
378 info!(
379 "background retry task started with {} packages",
380 retry_queue.len()
381 );
382
383 while let Some(mut retry_item) = retry_queue.pop_front() {
384 retry_item.retry_count += 1;
385
386 let delay_ms = RETRY_BASE_DELAY_MS
388 * RETRY_DELAY_MULTIPLIER
389 .saturating_pow(u32::from(retry_item.retry_count).saturating_sub(1));
390 info!(
391 package = %retry_item.pkg_name,
392 retry_attempt = retry_item.retry_count,
393 queue_remaining = retry_queue.len(),
394 delay_ms,
395 "background retry: waiting before attempt"
396 );
397 tokio::time::sleep(tokio::time::Duration::from_millis(delay_ms)).await;
398
399 let result = fetch_official_json_for_cache(
401 &retry_item.pkg_name,
402 &retry_item.repo_slug,
403 &retry_item.arch_slug,
404 )
405 .await;
406
407 match result {
408 Ok(()) => {
409 info!(
410 package = %retry_item.pkg_name,
411 retry_attempt = retry_item.retry_count,
412 "background retry succeeded, cache updated"
413 );
414 }
415 Err(needs_retry) if needs_retry => {
416 if retry_item.retry_count < MAX_RETRIES_PER_PACKAGE {
417 debug!(
419 package = %retry_item.pkg_name,
420 retry_attempt = retry_item.retry_count,
421 "background retry failed, adding back to end of queue"
422 );
423 retry_queue.push_back(retry_item);
424 } else {
425 warn!(
426 package = %retry_item.pkg_name,
427 max_retries = MAX_RETRIES_PER_PACKAGE,
428 "background retry: all attempts exhausted"
429 );
430 }
431 }
432 Err(_) => {
433 debug!(
435 package = %retry_item.pkg_name,
436 "background retry: completed (cache or non-retryable)"
437 );
438 }
439 }
440 }
441
442 info!("background retry task completed");
443}
444
445async fn fetch_official_json_for_cache(
461 pkg_name: &str,
462 repo_slug: &str,
463 arch_slug: &str,
464) -> std::result::Result<(), bool> {
465 use super::rate_limit::{
466 check_circuit_breaker, increase_archlinux_backoff, rate_limit_archlinux,
467 record_circuit_breaker_outcome, reset_archlinux_backoff,
468 };
469
470 let url = format!("https://archlinux.org/packages/{repo_slug}/{arch_slug}/{pkg_name}/json/",);
471 let endpoint_pattern = "/packages/*/json/";
472 let cache_path = official_json_cache_path(repo_slug, arch_slug, pkg_name);
473
474 if check_circuit_breaker(endpoint_pattern).is_err() {
476 debug!(
477 package = %pkg_name,
478 "background retry: circuit breaker blocking"
479 );
480 return Err(true); }
482
483 let _permit = rate_limit_archlinux().await;
485
486 let result = tokio::time::timeout(
488 tokio::time::Duration::from_millis(5000),
489 tokio::task::spawn_blocking({
490 let url = url.clone();
491 move || crate::util::curl::curl_json(&url)
492 }),
493 )
494 .await;
495
496 match result {
497 Ok(Ok(Ok(json))) => {
498 reset_archlinux_backoff();
499 record_circuit_breaker_outcome(endpoint_pattern, true);
500
501 if let Err(e) = save_official_json_cache(&cache_path, &json) {
503 debug!(
504 error = %e,
505 package = %pkg_name,
506 "background retry: failed to save cache"
507 );
508 }
509 Ok(())
510 }
511 Ok(Ok(Err(e))) => {
512 increase_archlinux_backoff(None);
513 record_circuit_breaker_outcome(endpoint_pattern, false);
514 debug!(
515 package = %pkg_name,
516 error = %e,
517 "background retry: fetch failed"
518 );
519 Err(true) }
521 Ok(Err(e)) => {
522 increase_archlinux_backoff(None);
523 record_circuit_breaker_outcome(endpoint_pattern, false);
524 debug!(
525 package = %pkg_name,
526 error = ?e,
527 "background retry: task join failed"
528 );
529 Err(true) }
531 Err(_) => {
532 increase_archlinux_backoff(None);
533 record_circuit_breaker_outcome(endpoint_pattern, false);
534 debug!(package = %pkg_name, "background retry: timeout");
535 Err(true) }
537 }
538}
539
/// Directory used to persist raw AUR RPC responses between runs.
#[must_use]
fn aur_json_cache_dir() -> PathBuf {
    crate::theme::lists_dir().join("aur_json_cache")
}
553
/// Longest sanitized key (in bytes) used verbatim as the cache filename;
/// longer keys fall back to a 16-hex-digit hash, keeping filenames well
/// under typical filesystem NAME_MAX limits.
const AUR_CACHE_KEY_MAX_LEN: usize = 200;
556
557fn aur_json_cache_path(pkgnames: &[String]) -> PathBuf {
569 let mut sorted = pkgnames.to_vec();
570 sorted.sort();
571 let key = sorted.join(",");
572 let safe_key = key
573 .chars()
574 .map(|c| {
575 if c.is_alphanumeric() || c == ',' || c == '-' || c == '_' {
576 c
577 } else {
578 '_'
579 }
580 })
581 .collect::<String>();
582 let filename_key = if safe_key.len() <= AUR_CACHE_KEY_MAX_LEN {
583 safe_key
584 } else {
585 let mut hasher = DefaultHasher::new();
586 key.hash(&mut hasher);
587 format!("{:016x}", hasher.finish())
588 };
589 aur_json_cache_dir().join(format!("{filename_key}.json"))
590}
591
592fn load_aur_json_cache(cache_path: &PathBuf) -> Option<Value> {
603 let data = std::fs::read_to_string(cache_path).ok()?;
604 serde_json::from_str::<Value>(&data).ok()
605}
606
607fn save_aur_json_cache(cache_path: &PathBuf, json: &Value) -> std::io::Result<()> {
620 if let Some(parent) = cache_path.parent() {
621 std::fs::create_dir_all(parent)?;
622 }
623 let pretty = serde_json::to_string_pretty(json)?;
624 std::fs::write(cache_path, pretty)
625}
626
627fn compare_aur_json_changes(old_json: &Value, new_json: &Value, pkg_name: &str) -> Option<String> {
641 let mut changes = Vec::new();
642
643 let old_version = old_json.get("Version").and_then(Value::as_str);
645 let new_version = new_json.get("Version").and_then(Value::as_str);
646 if old_version != new_version
647 && let (Some(old_v), Some(new_v)) = (old_version, new_version)
648 && old_v != new_v
649 {
650 changes.push(format!("Version: {old_v} → {new_v}"));
651 }
652
653 let old_desc = old_json.get("Description").and_then(Value::as_str);
655 let new_desc = new_json.get("Description").and_then(Value::as_str);
656 if old_desc != new_desc
657 && let (Some(old_d), Some(new_d)) = (old_desc, new_desc)
658 && old_d != new_d
659 {
660 changes.push("Description changed".to_string());
661 }
662
663 let old_maintainer = old_json.get("Maintainer").and_then(Value::as_str);
665 let new_maintainer = new_json.get("Maintainer").and_then(Value::as_str);
666 if old_maintainer != new_maintainer
667 && let (Some(old_m), Some(new_m)) = (old_maintainer, new_maintainer)
668 && old_m != new_m
669 {
670 changes.push(format!("Maintainer: {old_m} → {new_m}"));
671 }
672
673 let old_url = old_json.get("URL").and_then(Value::as_str);
675 let new_url = new_json.get("URL").and_then(Value::as_str);
676 if old_url != new_url
677 && let (Some(old_u), Some(new_u)) = (old_url, new_url)
678 && old_u != new_u
679 {
680 changes.push("URL changed".to_string());
681 }
682
683 let old_license = old_json.get("License").and_then(Value::as_array);
685 let new_license = new_json.get("License").and_then(Value::as_array);
686 if old_license != new_license {
687 changes.push("License changed".to_string());
688 }
689
690 let old_keywords = old_json.get("Keywords").and_then(Value::as_array);
692 let new_keywords = new_json.get("Keywords").and_then(Value::as_array);
693 if old_keywords != new_keywords {
694 changes.push("Keywords changed".to_string());
695 }
696
697 if changes.is_empty() {
698 None
699 } else {
700 Some(format!(
701 "Changes detected for {pkg_name}:\n{}",
702 changes.join("\n")
703 ))
704 }
705}
706
/// Directory used to persist raw archlinux.org package JSON between runs.
#[must_use]
fn official_json_cache_dir() -> PathBuf {
    crate::theme::lists_dir().join("official_json_cache")
}
720
721#[must_use]
734pub fn official_json_cache_path(repo: &str, arch: &str, pkg_name: &str) -> PathBuf {
735 let safe_repo = repo
736 .chars()
737 .map(|c| {
738 if c.is_alphanumeric() || c == '-' || c == '_' {
739 c
740 } else {
741 '_'
742 }
743 })
744 .collect::<String>();
745 let safe_arch = arch
746 .chars()
747 .map(|c| {
748 if c.is_alphanumeric() || c == '-' || c == '_' {
749 c
750 } else {
751 '_'
752 }
753 })
754 .collect::<String>();
755 let safe_name = pkg_name
756 .chars()
757 .map(|c| {
758 if c.is_alphanumeric() || c == '-' || c == '_' {
759 c
760 } else {
761 '_'
762 }
763 })
764 .collect::<String>();
765 official_json_cache_dir().join(format!("{safe_repo}_{safe_arch}_{safe_name}.json"))
766}
767
768#[must_use]
779pub fn load_official_json_cache(cache_path: &std::path::Path) -> Option<Value> {
780 let data = std::fs::read_to_string(cache_path).ok()?;
781 serde_json::from_str::<Value>(&data).ok()
782}
783
784pub(super) fn save_official_json_cache(cache_path: &PathBuf, json: &Value) -> std::io::Result<()> {
797 if let Some(parent) = cache_path.parent() {
798 std::fs::create_dir_all(parent)?;
799 }
800 let pretty = serde_json::to_string_pretty(json)?;
801 std::fs::write(cache_path, pretty)
802}
803
804pub(super) fn compare_official_json_changes(
818 old_json: &Value,
819 new_json: &Value,
820 pkg_name: &str,
821) -> Option<String> {
822 let mut changes = Vec::new();
823
824 let old_pkg = old_json.get("pkg").unwrap_or(old_json);
826 let new_pkg = new_json.get("pkg").unwrap_or(new_json);
827
828 let old_version = old_pkg.get("pkgver").and_then(Value::as_str);
830 let new_version = new_pkg.get("pkgver").and_then(Value::as_str);
831 if old_version != new_version
832 && let (Some(old_v), Some(new_v)) = (old_version, new_version)
833 && old_v != new_v
834 {
835 changes.push(format!("Version: {old_v} → {new_v}"));
836 }
837
838 let old_desc = old_pkg.get("pkgdesc").and_then(Value::as_str);
840 let new_desc = new_pkg.get("pkgdesc").and_then(Value::as_str);
841 if old_desc != new_desc
842 && let (Some(old_d), Some(new_d)) = (old_desc, new_desc)
843 && old_d != new_d
844 {
845 changes.push("Description changed".to_string());
846 }
847
848 let old_licenses = old_pkg.get("licenses").and_then(Value::as_array);
850 let new_licenses = new_pkg.get("licenses").and_then(Value::as_array);
851 if old_licenses != new_licenses {
852 changes.push("Licenses changed".to_string());
853 }
854
855 let old_url = old_pkg.get("url").and_then(Value::as_str);
857 let new_url = new_pkg.get("url").and_then(Value::as_str);
858 if old_url != new_url
859 && let (Some(old_u), Some(new_u)) = (old_url, new_url)
860 && old_u != new_u
861 {
862 changes.push("URL changed".to_string());
863 }
864
865 let old_groups = old_pkg.get("groups").and_then(Value::as_array);
867 let new_groups = new_pkg.get("groups").and_then(Value::as_array);
868 if old_groups != new_groups {
869 changes.push("Groups changed".to_string());
870 }
871
872 let old_depends = old_pkg.get("depends").and_then(Value::as_array);
874 let new_depends = new_pkg.get("depends").and_then(Value::as_array);
875 if old_depends != new_depends {
876 changes.push("Dependencies changed".to_string());
877 }
878
879 let old_last_update = old_json.get("last_update").and_then(Value::as_str);
881 let new_last_update = new_json.get("last_update").and_then(Value::as_str);
882 if old_last_update != new_last_update
883 && let (Some(old_date), Some(new_date)) = (old_last_update, new_last_update)
884 && old_date != new_date
885 {
886 if let (Some(old_norm), Some(new_norm)) =
888 (normalize_pkg_date(old_date), normalize_pkg_date(new_date))
889 && old_norm != new_norm
890 {
891 changes.push(format!("Last update: {old_norm} → {new_norm}"));
892 }
893 }
894
895 if changes.is_empty() {
896 None
897 } else {
898 Some(format!(
899 "Changes detected for {pkg_name}:\n{}",
900 changes.join("\n")
901 ))
902 }
903}
904
905#[must_use]
916pub fn get_aur_json_changes(pkg_name: &str) -> Option<String> {
917 AUR_JSON_CHANGES_CACHE
918 .lock()
919 .ok()
920 .and_then(|cache| cache.get(pkg_name).cloned())
921}
922
923#[must_use]
934pub fn get_official_json_changes(pkg_name: &str) -> Option<String> {
935 OFFICIAL_JSON_CHANGES_CACHE
936 .lock()
937 .ok()
938 .and_then(|cache| cache.get(pkg_name).cloned())
939}
940
/// Queries the AUR RPC v5 `info` endpoint for `pkgnames` and returns
/// name/version/last-modified info for each package found.
///
/// Side effects: diffs the fresh response against the on-disk JSON cache,
/// records human-readable summaries in `AUR_JSON_CHANGES_CACHE`, then
/// rewrites the on-disk cache with the new response.
///
/// # Errors
/// Propagates task-join errors and fetch/parse errors from `curl_json`.
async fn fetch_aur_versions(pkgnames: &[String]) -> Result<Vec<AurVersionInfo>> {
    if pkgnames.is_empty() {
        return Ok(Vec::new());
    }
    // Build the "arg[]=<name>&arg[]=<name>…" query string, percent-encoded.
    let args: String = pkgnames
        .iter()
        .map(|n| format!("arg[]={}", crate::util::percent_encode(n)))
        .collect::<Vec<String>>()
        .join("&");
    let url = format!("https://aur.archlinux.org/rpc/v5/info?{args}");
    rate_limit().await;

    // Load the previous response (if any) before fetching so it can be diffed.
    let cache_path = aur_json_cache_path(pkgnames);
    let old_json = load_aur_json_cache(&cache_path);

    // curl is blocking; run it on the blocking pool.
    let resp = tokio::task::spawn_blocking(move || crate::util::curl::curl_json(&url)).await??;

    // Diff old vs. new per package and record change summaries.
    if let Some(old_json) = old_json
        && let Some(results_old) = old_json.get("results").and_then(Value::as_array)
        && let Some(results_new) = resp.get("results").and_then(Value::as_array)
    {
        // Index both result arrays by package name for O(1) pairing.
        let old_map: HashMap<String, &Value> = results_old
            .iter()
            .filter_map(|obj| {
                obj.get("Name")
                    .and_then(Value::as_str)
                    .map(|name| (name.to_string(), obj))
            })
            .collect();
        let new_map: HashMap<String, &Value> = results_new
            .iter()
            .filter_map(|obj| {
                obj.get("Name")
                    .and_then(Value::as_str)
                    .map(|name| (name.to_string(), obj))
            })
            .collect();

        // Recover from a poisoned lock: the map itself is still usable.
        let mut changes_cache = AUR_JSON_CHANGES_CACHE
            .lock()
            .unwrap_or_else(std::sync::PoisonError::into_inner);
        for (pkg_name, new_obj) in &new_map {
            if let Some(old_obj) = old_map.get(pkg_name)
                && let Some(change_desc) = compare_aur_json_changes(old_obj, new_obj, pkg_name)
            {
                changes_cache.insert(pkg_name.clone(), change_desc);
            }
        }
    }

    // Persist the fresh response; failure is logged, not fatal.
    if let Err(e) = save_aur_json_cache(&cache_path, &resp) {
        warn!(error = %e, path = ?cache_path, "failed to save AUR JSON cache");
    } else {
        debug!(path = ?cache_path, "saved AUR JSON cache");
    }

    // Extract the fields this module needs from each result object.
    let results = resp
        .get("results")
        .and_then(|v| v.as_array())
        .cloned()
        .unwrap_or_default();
    let mut out = Vec::new();
    for obj in results {
        if let Some(name) = obj.get("Name").and_then(serde_json::Value::as_str) {
            let version = obj
                .get("Version")
                .and_then(serde_json::Value::as_str)
                .unwrap_or_default()
                .to_string();
            let last_modified = obj.get("LastModified").and_then(serde_json::Value::as_i64);
            out.push(AurVersionInfo {
                name: name.to_string(),
                version,
                last_modified,
            });
        }
    }
    Ok(out)
}
1038
/// Builds the "installed updates" news feed: official packages first (with
/// dates fetched concurrently), then AUR packages.
///
/// Results are served from `UPDATES_CACHE` while younger than
/// `SKIP_CACHE_TTL_SECONDS`. `seen_pkg_versions` is the name→version
/// baseline and is updated in place; `force_emit_all` emits items even
/// without a version change, up to the limits.
///
/// # Errors
/// Propagates failures from the AUR RPC fetch (`fetch_aur_versions`).
pub(super) async fn fetch_installed_updates<HS, HV>(
    installed: &HashSet<String, HS>,
    limit: usize,
    seen_pkg_versions: &mut HashMap<String, String, HV>,
    force_emit_all: bool,
    updates_versions: Option<&HashMap<String, (String, String)>>,
) -> Result<Vec<NewsFeedItem>>
where
    HS: BuildHasher + Send + Sync + 'static,
    HV: BuildHasher + Send + Sync + 'static,
{
    // Serve recent results from the process-wide cache to avoid re-fetching.
    if let Ok(cache_guard) = UPDATES_CACHE.lock()
        && let Some((cached_items, last_fetch)) = cache_guard.as_ref()
        && last_fetch.elapsed().as_secs() < SKIP_CACHE_TTL_SECONDS
    {
        info!(
            "fetch_installed_updates: using cached results (age={}s, items={})",
            last_fetch.elapsed().as_secs(),
            cached_items.len()
        );
        return Ok(cached_items.clone());
    }

    debug!(
        "fetch_installed_updates: starting, installed_count={}, limit={}, force_emit_all={}",
        installed.len(),
        limit,
        force_emit_all
    );
    let mut items = Vec::new();
    // Sort for deterministic scan order (and stable candidate ordering).
    let mut installed_sorted: Vec<String> = installed.iter().cloned().collect();
    installed_sorted.sort();

    debug!(
        "fetch_installed_updates: processing {} installed packages",
        installed_sorted.len()
    );
    let (
        official_candidates,
        aur_candidates,
        new_packages,
        updated_packages,
        baseline_only,
        remaining,
    ) = process_official_packages(
        &installed_sorted,
        seen_pkg_versions,
        updates_versions,
        force_emit_all,
        limit,
    );
    info!(
        "fetch_installed_updates: official scan complete, new_packages={}, updated_packages={}, baseline_only={}",
        new_packages, updated_packages, baseline_only
    );

    if !official_candidates.is_empty() {
        debug!(
            "fetch_installed_updates: fetching dates for {} official packages (rate-limited)",
            official_candidates.len()
        );
        // Date fetches complete out of order; restore scan order before emitting.
        let mut official_items = fetch_official_dates_with_retry(official_candidates).await;
        official_items.sort_by_key(|(order, _)| *order);
        for (_, item) in official_items {
            items.push(item);
        }
        debug!(
            "fetch_installed_updates: official packages processed, items={}, aur_candidates={}, remaining={}",
            items.len(),
            aur_candidates.len(),
            remaining
        );
    }

    if aur_candidates.is_empty() {
        debug!("fetch_installed_updates: no AUR candidates, skipping AUR fetch");
        return Ok(items);
    }

    debug!(
        "fetch_installed_updates: fetching AUR versions for {} candidates",
        aur_candidates.len()
    );
    let aur_info = fetch_aur_versions(&aur_candidates).await?;
    debug!(
        "fetch_installed_updates: fetched {} AUR package versions",
        aur_info.len()
    );
    // AUR items get a budget of half the limit, independent of the budget the
    // official scan left over (`remaining` is intentionally not reused here).
    let aur_remaining = limit / 2;
    let (mut aur_items, aur_new_packages, aur_updated_packages, aur_baseline_only, _remaining) =
        process_aur_packages(
            aur_info,
            seen_pkg_versions,
            updates_versions,
            force_emit_all,
            aur_remaining,
        );
    items.append(&mut aur_items);
    let baseline_only = baseline_only.saturating_add(aur_baseline_only);

    info!(
        emitted = items.len(),
        new_packages,
        updated_packages,
        aur_new_packages,
        aur_updated_packages,
        baseline_only,
        installed_total = installed.len(),
        aur_candidates = aur_candidates.len(),
        "installed update feed built"
    );

    // Refresh the process-wide cache with the freshly built feed.
    if let Ok(mut cache_guard) = UPDATES_CACHE.lock() {
        *cache_guard = Some((items.clone(), Instant::now()));
    }

    Ok(items)
}
1181
/// Builds the feed of new AUR comments for installed non-official packages.
///
/// Results are served from `AUR_COMMENTS_CACHE` while younger than
/// `SKIP_CACHE_TTL_SECONDS`. Packages are fetched sequentially in sorted
/// name order until `limit` items have been collected; `seen_aur_comments`
/// is the dedup baseline and is updated by `update_seen_for_comments`.
///
/// # Errors
/// Currently always returns `Ok`: per-package fetch failures are logged and
/// skipped rather than propagated.
pub(super) async fn fetch_installed_aur_comments<HS, HC>(
    installed: &HashSet<String, HS>,
    limit: usize,
    seen_aur_comments: &mut HashMap<String, String, HC>,
    force_emit_all: bool,
) -> Result<Vec<NewsFeedItem>>
where
    HS: BuildHasher + Send + Sync + 'static,
    HC: BuildHasher + Send + Sync + 'static,
{
    // Serve recent results from the process-wide cache to avoid re-fetching.
    if let Ok(cache_guard) = AUR_COMMENTS_CACHE.lock()
        && let Some((cached_items, last_fetch)) = cache_guard.as_ref()
        && last_fetch.elapsed().as_secs() < SKIP_CACHE_TTL_SECONDS
    {
        info!(
            "fetch_installed_aur_comments: using cached results (age={}s, items={})",
            last_fetch.elapsed().as_secs(),
            cached_items.len()
        );
        return Ok(cached_items.clone());
    }

    let mut items = Vec::new();
    if limit == 0 {
        return Ok(items);
    }
    // Anything not in the official index is treated as an AUR package.
    let mut aur_names: Vec<String> = installed
        .iter()
        .filter_map(|name| {
            if crate::index::find_package_by_name(name).is_some() {
                None
            } else {
                Some(name.clone())
            }
        })
        .collect();
    aur_names.sort();
    let mut baseline_only = 0usize;

    for pkgname in &aur_names {
        if items.len() >= limit {
            break;
        }
        match crate::sources::fetch_aur_comments(pkgname.clone()).await {
            Ok(comments) => {
                if comments.is_empty() {
                    continue;
                }
                let newly_seen = update_seen_for_comments(
                    pkgname,
                    &comments,
                    seen_aur_comments,
                    limit.saturating_sub(items.len()),
                    force_emit_all,
                );
                // All comments already known: package only advanced the baseline.
                if newly_seen.is_empty() {
                    baseline_only = baseline_only.saturating_add(1);
                }
                items.extend(newly_seen);
            }
            // Best-effort: a failed package is logged and skipped.
            Err(e) => warn!(error = %e, pkg = %pkgname, "failed to fetch AUR comments"),
        }
    }

    debug!(
        candidates = aur_names.len(),
        emitted = items.len(),
        baseline_only,
        "installed AUR comments feed built"
    );

    // Refresh the process-wide cache with the freshly built feed.
    if let Ok(mut cache_guard) = AUR_COMMENTS_CACHE.lock() {
        *cache_guard = Some((items.clone(), Instant::now()));
    }

    Ok(items)
}
1275
1276#[cfg(test)]
1277mod tests {
1278 use super::aur_json_cache_path;
1279
1280 #[test]
1285 fn aur_json_cache_path_long_list_uses_short_filename() {
1286 let many: Vec<String> = (0..60).map(|i| format!("pkg-{i}")).collect();
1287 let path = aur_json_cache_path(&many);
1288 let name = path.file_name().expect("has filename").to_string_lossy();
1289 assert!(
1290 name.len() <= 255,
1291 "filename must not exceed NAME_MAX: len={}",
1292 name.len()
1293 );
1294 assert!(name.ends_with(".json"));
1295 assert!(
1297 name.len() <= 25,
1298 "long key should use hash (short name): {name}"
1299 );
1300 }
1301
1302 #[test]
1303 fn aur_json_cache_path_deterministic() {
1304 let a = vec!["b".into(), "a".into()];
1305 let b = vec!["a".into(), "b".into()];
1306 assert_eq!(aur_json_cache_path(&a), aur_json_cache_path(&b));
1307 }
1308}