
Commit a2b2bc7

made Data into an Arc<InnerData>
1 parent: acdf4b2

File tree: 3 files changed, 31 additions (+) and 29 deletions (−)

  src/data.rs
  src/github/api.rs
  src/github/mod.rs

src/data.rs

Lines changed: 29 additions & 27 deletions
@@ -2,7 +2,7 @@ use enum_map::{Enum, EnumMap};
 use serde_derive::{Deserialize, Serialize};
 use tokio::sync::Mutex;
 use tokio::task::{spawn_blocking, JoinSet};
-use tracing::{debug, info, trace};
+use tracing::debug;

 use crate::config::Config;
 use std::collections::BTreeMap;
@@ -31,64 +31,66 @@ pub struct Repo {
     pub has_cargo_lock: bool,
 }

-#[derive(Debug, Clone)]
-pub struct Data {
+#[derive(Debug)]
+pub struct InnerData {
     data_dir: PathBuf,

-    state_lock: Arc<Mutex<()>>,
+    state_lock: Mutex<()>,

-    state_cache: Arc<State>,
+    state_cache: State,

-    repos_state: Arc<Mutex<EnumMap<Forge, BTreeMap<String, Repo>>>>,
+    repos_state: Mutex<EnumMap<Forge, BTreeMap<String, Repo>>>,
 }

+#[derive(Debug, Clone)]
+pub struct Data(Arc<InnerData>);
+
 impl Data {
     pub fn new(config: &Config) -> color_eyre::Result<Self> {
         fs::create_dir_all(&config.data_dir)?;

-        let mut data = Data {
-            data_dir: config.data_dir.clone(),
-
-            state_lock: Arc::new(Mutex::new(())),
-            state_cache: Arc::new(State::default()),
-            repos_state: Arc::new(Mutex::new(EnumMap::default())),
+        let state_path = config.data_dir.join("state.json");
+        let state_cache = if state_path.exists() {
+            serde_json::from_slice(&fs::read(&state_path)?)?
+        } else {
+            State::default()
         };

-        let state_path = data.state_path();
-        if state_path.exists() {
-            let state_cache: State = serde_json::from_slice(&fs::read(&state_path)?)?;
+        let data = Self(Arc::new(InnerData {
+            data_dir: config.data_dir.clone(),

-            data.state_cache = Arc::new(state_cache)
-        }
+            state_lock: Mutex::new(()),
+            state_cache,
+            repos_state: Mutex::new(EnumMap::default()),
+        }));

         Ok(data)
     }

     pub fn state_path(&self) -> PathBuf {
-        self.data_dir.join("state.json")
+        self.0.data_dir.join("state.json")
     }

     pub fn csv_path(&self, forge: Forge) -> PathBuf {
         match forge {
-            Forge::Github => self.data_dir.join("github.csv"),
+            Forge::Github => self.0.data_dir.join("github.csv"),
         }
     }

     pub fn get_last_id(&self, forge: Forge) -> usize {
-        self.state_cache.0[forge].load(std::sync::atomic::Ordering::SeqCst)
+        self.0.state_cache.0[forge].load(std::sync::atomic::Ordering::SeqCst)
     }

     /// Store the state cache to disk, i.e. last fetched ids
     async fn store_state_cache(&self) -> color_eyre::Result<()> {
-        let state = self.state_cache.clone();
-        let lock = self.state_lock.clone();
+        let this = self.clone();
         let state_path = self.state_path();
         spawn_blocking(move || -> color_eyre::Result<()> {
-            let guard = lock.blocking_lock();
+            let guard = this.0.state_lock.blocking_lock();

             let file = File::create(state_path)?;
             let mut file = BufWriter::new(file);
-            serde_json::to_writer_pretty(&mut file, state.as_ref())?;
+            serde_json::to_writer_pretty(&mut file, &this.0.state_cache)?;
             file.write_all(b"\n")?;

             drop(guard);
@@ -102,7 +104,7 @@ impl Data {
     /// Stores the repos found to disk in a CSV
     async fn store_csv(&self) -> color_eyre::Result<()> {
         debug!("storing csv file");
-        let mut repos = self.repos_state.lock().await;
+        let mut repos = self.0.repos_state.lock().await;

         let mut js = JoinSet::new();

@@ -139,7 +141,7 @@ impl Data {
     }

     pub async fn set_last_id(&self, forge: Forge, n: usize) -> color_eyre::Result<()> {
-        self.state_cache.0[forge].store(n, std::sync::atomic::Ordering::SeqCst);
+        self.0.state_cache.0[forge].store(n, std::sync::atomic::Ordering::SeqCst);

         self.store_csv().await?;
         self.store_state_cache().await?;
@@ -148,7 +150,7 @@ impl Data {
     }

     pub async fn store_repo(&self, forge: Forge, repo: Repo) {
-        let mut repos_state = self.repos_state.lock().await;
+        let mut repos_state = self.0.repos_state.lock().await;
         repos_state[forge].insert(repo.name.clone(), repo);
     }
 }
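
Note: the net effect of this diff is the Arc-newtype pattern. Instead of three fields that each carry their own Arc, the whole InnerData now sits behind a single Arc, so Data::clone() is one refcount bump and every clone shares the same lock, cache, and repo map. Below is a minimal, self-contained sketch of the same pattern with simplified field types; the counter/bump names are illustrative only, not from this crate, and it assumes tokio with the rt and macros features enabled.

use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;

use tokio::sync::Mutex;

// Illustrative stand-in for the commit's InnerData: all shared state lives
// behind a single Arc instead of each field carrying its own Arc.
#[derive(Debug)]
struct InnerData {
    state_lock: Mutex<()>,
    counter: AtomicUsize,
}

// Cheap-to-clone handle: Clone bumps the refcount, it does not copy the fields.
#[derive(Debug, Clone)]
struct Data(Arc<InnerData>);

impl Data {
    fn new() -> Self {
        Data(Arc::new(InnerData {
            state_lock: Mutex::new(()),
            counter: AtomicUsize::new(0),
        }))
    }

    // Methods reach the shared fields through `self.0`, as in the diff.
    fn bump(&self) -> usize {
        self.0.counter.fetch_add(1, Ordering::SeqCst)
    }
}

#[tokio::main]
async fn main() {
    let data = Data::new();
    let clone = data.clone(); // one atomic refcount increment
    clone.bump();
    assert_eq!(data.bump(), 1); // both handles observe the same counter

    // Cloning the whole handle before moving it into spawn_blocking replaces
    // the old per-field Arc::clone calls, mirroring store_state_cache above.
    let this = data.clone();
    tokio::task::spawn_blocking(move || {
        let _guard = this.0.state_lock.blocking_lock();
    })
    .await
    .unwrap();
}

This is also why store_state_cache can now move a single self.clone() into spawn_blocking instead of cloning the state cache and the lock separately.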

src/github/api.rs

Lines changed: 1 addition & 1 deletion
@@ -78,7 +78,7 @@ pub struct GraphRepository {
 }

 impl GraphRepository {
-    pub fn to_repo(self, has_cargo_toml: bool, has_cargo_lock: bool) -> Repo {
+    pub fn into_repo(self, has_cargo_toml: bool, has_cargo_lock: bool) -> Repo {
         Repo {
             id: self.id,
             name: self.name_with_owner,
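
Note: the rename follows the Rust API naming guidelines, where conversions that take self by value and consume it use an into_ prefix, while to_ is reserved for borrowing conversions. A reduced sketch of the distinction; the structs here are trimmed, invented two-field versions rather than the crate's real definitions.

// Simplified stand-ins for the crate's types; only the fields used below are kept.
struct Repo {
    id: String,
    name: String,
}

struct GraphRepository {
    id: String,
    name_with_owner: String,
}

impl GraphRepository {
    // `into_` tells the caller that `self` is consumed, so fields can be
    // moved out of it without cloning.
    fn into_repo(self) -> Repo {
        Repo {
            id: self.id,
            name: self.name_with_owner,
        }
    }
}

fn main() {
    let graph = GraphRepository {
        id: "R_1".into(),
        name_with_owner: "owner/repo".into(),
    };
    let repo = graph.into_repo();
    // `graph` has been moved and can no longer be used here.
    println!("{} ({})", repo.name, repo.id);
}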

src/github/mod.rs

Lines changed: 1 addition & 1 deletion
@@ -60,7 +60,7 @@ impl Scraper {
             .filter_map(Option::as_ref)
             .any(|el| el.name == "Rust")
         {
-            let mut repo = repo.to_repo(false, false);
+            let mut repo = repo.into_repo(false, false);
             let files = self.gh.tree(&repo, false).await;
             match files {
                 Ok(tree) => {
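
Note: at the call site the consuming conversion pairs naturally with shadowing: the binding named repo that held a GraphRepository is rebound to the Repo produced from it, so the moved-from value cannot be touched afterwards. A tiny, generic illustration of that rebinding idiom, using standard-library types unrelated to this crate.

fn main() {
    let name = String::from("owner/repo");
    // `into_bytes` consumes the String; rebinding the same name means the
    // moved-from value is no longer reachable under any name.
    let name = name.into_bytes();
    println!("{} bytes", name.len());
}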
