Optimize json loading, filter and reduce
parent 82e2a49cdc
commit 48cae5f709
3 changed files with 16 additions and 29 deletions
Cargo.lock (2 changes, generated)

@@ -1229,7 +1229,7 @@ dependencies = [
 [[package]]
 name = "sticker-usage-analyzer"
-version = "0.1.0"
+version = "1.0.0"
 dependencies = [
  "chrono",
  "maud",

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "sticker-usage-analyzer"
-version = "0.1.0"
+version = "1.0.0"
 edition = "2021"
 
 [dependencies]

src/main.rs (33 changes)

@@ -1,6 +1,6 @@
 use maud::{html, Markup, DOCTYPE};
 use rocket::{Rocket,Build,launch,get,routes,fs::{FileServer}};
-use std::collections::HashMap;
+use std::{fs::OpenOptions, io::BufReader, collections::HashMap};
 use chrono::prelude::*;
@@ -105,32 +105,19 @@ fn rocket() -> Rocket<Build> {
 
 #[get("/")]
 fn index() -> Markup {
     println!("Parsing JSON... (this can take a moment)");
     let tg_export_result: Chat = serde_json::from_reader(
-        std::fs::OpenOptions::new().read(true).open("./result.json").expect("Could not open ./result.json")
+        BufReader::new(OpenOptions::new().read(true).open("./result.json").expect("Could not open ./result.json"))
     ).expect("Could not parse result.json");
     println!("Done!");
 
-    let mut messages: Vec<ScoredChatMessage> = tg_export_result.messages.into_iter().filter(|m| {
-        return if let Some(media_type) = &m.media_type {
-            *media_type == MediaType::Sticker
-        } else {
-            false
-        }
-    }).fold(HashMap::new(), |mut acc: HashMap<String, ScoredChatMessage>, message| {
-        let file = message.file.as_ref().expect("No file").to_owned();
-        if acc.contains_key(&file) {
-            if let Some(scored_chat_message) = acc.get_mut(&file) {
-                let message_date = message.date_unixtime.parse().expect("Could not parse date_unixtime");
-                if scored_chat_message.last_used < message_date {
-                    scored_chat_message.last_used = message_date;
-                }
-                scored_chat_message.times = scored_chat_message.times + 1;
-            }
-        } else {
-            acc.insert(file, ScoredChatMessage::from(message));
-        }
-
+    let mut messages: Vec<ScoredChatMessage> = tg_export_result.messages.into_iter()
+        .filter(|m| matches!(m.media_type, Some(MediaType::Sticker)))
+        .fold(HashMap::new(), |mut acc: HashMap<String, ScoredChatMessage>, message| {
+            if let Some(ref file) = message.file {
+                let message_date = (&message.date_unixtime).parse().expect("Could not parse date_unixtime");
+                let entry = acc.entry(file.to_owned()).or_insert_with(|| ScoredChatMessage::from(message));
+                entry.last_used = entry.last_used.max(message_date);
+                entry.times += 1;
+            }
         acc
     })
     .into_values()
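The second change collapses the old two-pass filter-then-fold into one chain: matches! replaces the five-line if-let/else predicate, and the HashMap entry API replaces the contains_key/get_mut/insert dance so each key is hashed once per message instead of twice. It also stops panicking on messages without a file (the old expect("No file")), skipping them instead. A minimal sketch of the same pattern with simplified stand-in types, since the real ChatMessage, ScoredChatMessage, and From impl aren't shown in the diff:

use std::collections::HashMap;

enum MediaType { Sticker, Photo }

// Simplified stand-ins for the crate's message types.
struct Msg { media_type: Option<MediaType>, file: Option<String>, date_unixtime: String }
struct Scored { last_used: i64, times: u32 }

impl From<Msg> for Scored {
    // Assumed here: the count starts at zero, so the unconditional
    // increment below also counts the first sighting.
    fn from(_m: Msg) -> Self { Scored { last_used: 0, times: 0 } }
}

fn main() {
    let msgs = vec![
        Msg { media_type: Some(MediaType::Sticker), file: Some("a.webp".into()), date_unixtime: "100".into() },
        Msg { media_type: Some(MediaType::Sticker), file: Some("a.webp".into()), date_unixtime: "200".into() },
        Msg { media_type: Some(MediaType::Photo), file: Some("b.jpg".into()), date_unixtime: "300".into() },
    ];

    // One pass: keep stickers, then group by file name while tracking the
    // newest timestamp and a use count. entry().or_insert_with() does the
    // lookup and the insert with a single hash of the key.
    let scored: Vec<Scored> = msgs.into_iter()
        .filter(|m| matches!(m.media_type, Some(MediaType::Sticker)))
        .fold(HashMap::new(), |mut acc: HashMap<String, Scored>, m| {
            if let Some(file) = m.file.clone() {
                let date: i64 = m.date_unixtime.parse().expect("Could not parse date_unixtime");
                let entry = acc.entry(file).or_insert_with(|| Scored::from(m));
                entry.last_used = entry.last_used.max(date);
                entry.times += 1;
            }
            acc
        })
        .into_values()
        .collect();

    assert_eq!(scored.len(), 1);
    assert_eq!(scored[0].times, 2);
    assert_eq!(scored[0].last_used, 200);
}

Note the assumption baked into From above: if the real From impl starts the count at one rather than zero, the unconditional entry.times += 1 would double-count first occurrences.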