Optimize json loading, filter and reduce
parent 82e2a49cdc
commit 48cae5f709
3 changed files with 16 additions and 29 deletions

Cargo.lock (generated): 2 changes

@@ -1229,7 +1229,7 @@ dependencies = [
 [[package]]
 name = "sticker-usage-analyzer"
-version = "0.1.0"
+version = "1.0.0"
 dependencies = [
  "chrono",
  "maud",

Cargo.toml: 2 changes

@@ -1,6 +1,6 @@
 [package]
 name = "sticker-usage-analyzer"
-version = "0.1.0"
+version = "1.0.0"
 edition = "2021"
 
 [dependencies]

src/main.rs: 41 changes

@@ -1,6 +1,6 @@
 use maud::{html, Markup, DOCTYPE};
 use rocket::{Rocket,Build,launch,get,routes,fs::{FileServer}};
-use std::collections::HashMap;
+use std::{fs::OpenOptions, io::BufReader, collections::HashMap};
 use chrono::prelude::*;
 
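The new imports set up the actual optimization in the hunk below: serde_json::from_reader pulls bytes from its reader in many small reads, so handing it a raw File means roughly one system call per read, while wrapping the file in a BufReader serves those reads from an in-memory buffer. A minimal standalone sketch of the pattern, not the crate's own code: the Export struct, its field, and the file path are placeholders, and serde (with the derive feature) plus serde_json are assumed dependencies.

    use std::{fs::OpenOptions, io::BufReader};

    use serde::Deserialize;

    // Placeholder export type; the real crate deserializes its own Chat struct.
    #[derive(Deserialize, Debug)]
    struct Export {
        name: String,
    }

    fn main() -> Result<(), Box<dyn std::error::Error>> {
        let file = OpenOptions::new().read(true).open("./export.json")?;
        // from_reader issues many small reads; the BufReader answers them
        // from memory instead of hitting the file descriptor each time.
        let export: Export = serde_json::from_reader(BufReader::new(file))?;
        println!("{export:?}");
        Ok(())
    }
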
@@ -105,36 +105,23 @@ fn rocket() -> Rocket<Build> {
 
 #[get("/")]
 fn index() -> Markup {
-    println!("Parsing JSON... (this can take a moment)");
     let tg_export_result: Chat = serde_json::from_reader(
-        std::fs::OpenOptions::new().read(true).open("./result.json").expect("Could not open ./result.json")
+        BufReader::new(OpenOptions::new().read(true).open("./result.json").expect("Could not open ./result.json"))
     ).expect("Could not parse result.json");
-    println!("Done!");
 
-    let mut messages: Vec<ScoredChatMessage> = tg_export_result.messages.into_iter().filter(|m| {
-        return if let Some(media_type) = &m.media_type {
-            *media_type == MediaType::Sticker
-        } else {
-            false
-        }
-    }).fold(HashMap::new(), |mut acc: HashMap<String, ScoredChatMessage>, message| {
-        let file = message.file.as_ref().expect("No file").to_owned();
-        if acc.contains_key(&file) {
-            if let Some(scored_chat_message) = acc.get_mut(&file) {
-                let message_date = message.date_unixtime.parse().expect("Could not parse date_unixtime");
-                if scored_chat_message.last_used < message_date {
-                    scored_chat_message.last_used = message_date;
-                }
-                scored_chat_message.times = scored_chat_message.times + 1;
-            }
-        } else {
-            acc.insert(file, ScoredChatMessage::from(message));
-        }
-        acc
-    })
-    .into_values()
-    .collect();
+    let mut messages: Vec<ScoredChatMessage> = tg_export_result.messages.into_iter()
+        .filter(|m| matches!(m.media_type, Some(MediaType::Sticker)))
+        .fold(HashMap::new(), |mut acc: HashMap<String, ScoredChatMessage>, message| {
+            if let Some(ref file) = message.file {
+                let message_date = (&message.date_unixtime).parse().expect("Could not parse date_unixtime");
+                let entry = acc.entry(file.to_owned()).or_insert_with(|| ScoredChatMessage::from(message));
+                entry.last_used = entry.last_used.max(message_date);
+                entry.times += 1;
+            }
+            acc
+        })
+        .into_values()
+        .collect();
 
     messages.sort_by(|a, b| b.times.cmp(&a.times));

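The second half of the hunk replaces the filter closure with a matches! call and the contains_key / get_mut / insert branches with a single HashMap::entry call inside the fold; messages without a file are now skipped instead of hitting .expect("No file"). A self-contained sketch of that filter-then-fold counting pattern, using simplified stand-in types (Media, Msg, Scored and their fields are illustrative, not the crate's actual MediaType or ScoredChatMessage):

    use std::collections::HashMap;

    // Illustrative stand-ins for the crate's own chat types.
    enum Media {
        Sticker,
        Photo,
    }

    struct Msg {
        media_type: Option<Media>,
        file: Option<String>,
        date_unixtime: String,
    }

    #[derive(Debug)]
    struct Scored {
        times: u32,
        last_used: i64,
    }

    fn main() {
        let msgs = vec![
            Msg { media_type: Some(Media::Sticker), file: Some("a.webp".into()), date_unixtime: "100".into() },
            Msg { media_type: Some(Media::Sticker), file: Some("a.webp".into()), date_unixtime: "300".into() },
            Msg { media_type: Some(Media::Photo), file: Some("b.jpg".into()), date_unixtime: "200".into() },
        ];

        // filter keeps only sticker messages; fold builds one Scored record per
        // sticker file. entry() looks the key up once and either inserts a fresh
        // record or hands back the existing one.
        let mut scored: Vec<Scored> = msgs
            .into_iter()
            .filter(|m| matches!(m.media_type, Some(Media::Sticker)))
            .fold(HashMap::new(), |mut acc: HashMap<String, Scored>, m| {
                if let Some(ref file) = m.file {
                    let date: i64 = m.date_unixtime.parse().expect("bad date");
                    let entry = acc.entry(file.clone()).or_insert(Scored { times: 0, last_used: date });
                    entry.last_used = entry.last_used.max(date);
                    entry.times += 1;
                }
                acc
            })
            .into_values()
            .collect();

        scored.sort_by(|a, b| b.times.cmp(&a.times));
        println!("{scored:?}"); // [Scored { times: 2, last_used: 300 }]
    }

The point of entry() is that the lookup happens once whether the key is new or already present, which is what removes the old double lookup of contains_key followed by get_mut.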