Added Firefox history summary

Liliesh 2025-07-10 22:02:52 +02:00
commit 2a2e158226
Signed by: liliesh
GPG key ID: 680387646C7BAE8E
2 changed files with 45 additions and 0 deletions


@@ -0,0 +1,43 @@
import json
from urllib.parse import urlparse

def sort_and_save_results(pages, fileName):
    # Sort pages by visit count, most visited first, and write them to "<fileName>.csv".
    sortedpages = sorted(pages.items(), key=lambda x: x[1], reverse=True)
    sorteddict = dict(sortedpages)
    with open(f"{fileName}.csv", "w") as f:
        f.write("page;visits\n")
        for page, visits in sorteddict.items():
            f.write(f"{page};{visits}\n")

def get_results_by_url(history):
    # One row per full URL, using the visit count stored for that URL.
    pages = {}
    for hentry in history:
        pages[hentry["url"]] = hentry["vcount"]
    sort_and_save_results(pages, "by-url")

def get_results_by_domain(history):
    # Aggregate visit counts per domain (the netloc part of each URL).
    pages = {}
    for hentry in history:
        domain = urlparse(hentry["url"]).netloc
        if domain not in pages:
            pages[domain] = hentry["vcount"]
        else:
            pages[domain] += hentry["vcount"]
    sort_and_save_results(pages, "by-domain")

def main():
    # Load the exported history and keep only the fields used above.
    with open("history.json", "r") as file:
        data = json.load(file)
    history = []
    for entry in data:
        history.append({
            "url": entry["url"],
            "title": entry["title"],
            "lvt": entry["lastVisitTime"],
            "vcount": entry["visitCount"],
        })
    get_results_by_domain(history)
    get_results_by_url(history)

if __name__ == "__main__":
    main()
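
For reference, a minimal sketch of the input the script assumes: history.json is a JSON array of objects carrying url, title, lastVisitTime, and visitCount fields (the field names come from main() above; the entries below are invented purely for illustration).

import json

# Hypothetical sample data in the shape main() expects; values are made up.
sample = [
    {"url": "https://example.org/a", "title": "Example A",
     "lastVisitTime": 1720000000000, "visitCount": 12},
    {"url": "https://example.org/b", "title": "Example B",
     "lastVisitTime": 1720000100000, "visitCount": 3},
]

# Write the sample next to the script so running it produces by-url.csv and by-domain.csv.
with open("history.json", "w") as f:
    json.dump(sample, f)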