#!/usr/bin/env python
"""Read and write Safari ``Cookies.binarycookies`` files.

File layout (multi-byte integers big-endian unless noted otherwise):

    magic b'cook' | num_pages (>i) | one page size per page (>i) | pages...

Each page:

    0x00000100 header (>i) | num_cookies (<i) | cookie offsets (<i each,
    relative to page start) | 0x00000000 footer | cookie records

Each cookie record (little-endian): size, unknown, flags, unknown, four
string offsets (url/name/path/value, relative to record start), an 8-byte
end-of-record marker, expiry and creation dates as doubles (seconds since
the Mac epoch, 2001-01-01), then the four NUL-terminated strings.

NOTE(review): the original source was corrupted -- every ``<...>`` span
(all little-endian struct formats and the code between them) had been
stripped, and the file was truncated mid-expression.  The per-cookie
serialization/parsing below is reconstructed from the surviving comments
and the published binarycookies format; confirm against a real
Cookies.binarycookies file.
"""
import argparse
import json
import logging
import subprocess
from collections import defaultdict
from io import BytesIO
from struct import pack, unpack
from time import gmtime, strftime

# Logger setup
log = logging.getLogger(__name__)
log.setLevel(logging.WARNING)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
log.addHandler(handler)

# Offset of the Mac/Cocoa epoch (2001-01-01 UTC) from the Unix epoch.
MAC_EPOCH_OFFSET = 978307200

# Fixed (pre-string) portion of a cookie record:
# size + unknown + flags + unknown + 4 string offsets = 8 * 4 bytes,
# end-of-record marker = 8 bytes, expiry + creation doubles = 16 bytes.
COOKIE_HEADER_SIZE = 56


def flag_type(raw_flag):
    '''Solve cookie flags'''
    types = {0: None, 1: 'secure', 4: 'http only', 5: 'Secure, http only'}
    return types[raw_flag]


def date(raw_date):
    '''Mac epoch to date string'''
    return strftime("%a, %d %b %Y ", gmtime(raw_date + MAC_EPOCH_OFFSET))[:-1]


def _cookie_size(cookie):
    '''Serialized length in bytes of one cookie record.'''
    strings = (cookie['domain'], cookie['name'], cookie['path'],
               cookie['value'])
    # +1 per string for the NUL terminator.
    return COOKIE_HEADER_SIZE + sum(len(s.encode('utf-8')) + 1
                                    for s in strings)


def _write_cookie(file, cookie):
    '''Serialize a single cookie record (all fields little-endian).

    NOTE(review): reconstructed -- the original body was lost to the
    markup-stripping corruption.  The record size is recomputed from the
    strings rather than trusted from ``cookie['size']`` so the offsets
    written by :func:`save` always match the bytes emitted.
    '''
    url = cookie['domain'].encode('utf-8') + b'\x00'
    name = cookie['name'].encode('utf-8') + b'\x00'
    path = cookie['path'].encode('utf-8') + b'\x00'
    value = cookie['value'].encode('utf-8') + b'\x00'
    url_offset = COOKIE_HEADER_SIZE
    name_offset = url_offset + len(url)
    path_offset = name_offset + len(name)
    value_offset = path_offset + len(path)
    size = value_offset + len(value)
    file.write(pack('<i', size))
    file.write(pack('<i', 0))               # unknown field
    file.write(pack('<i', cookie['flag']))
    file.write(pack('<i', 0))               # unknown field
    file.write(pack('<i', url_offset))
    file.write(pack('<i', name_offset))
    file.write(pack('<i', path_offset))
    file.write(pack('<i', value_offset))
    file.write(pack('<q', 0))               # end-of-record marker
    file.write(pack('<d', cookie['expiry_date']))
    file.write(pack('<d', cookie['create_date']))
    file.write(url + name + path + value)


def save(file, cookies):
    '''Write a list of cookies into a binary file'''
    file.seek(0, 0)
    file.truncate()
    # File Magic String: cook
    file.write(b'cook')
    # Divide cookies into pages.
    # There is a page for each domain.
    pages = defaultdict(list)
    for cookie in cookies:
        pages[cookie['domain']].append(cookie)
    log.debug('Domains: %s', ', '.join(pages.keys()))
    log.debug('Cookies: %d', len(cookies))
    # Number of pages
    file.write(pack('>i', len(pages)))
    log.info('Writing %d pages', len(pages))
    # Write page sizes:
    # 4-byte offset per cookie + cookies size + fixed 12-byte
    # header/count/footer.
    for i, page in enumerate(pages.values()):
        cookies_size = sum(_cookie_size(cookie) for cookie in page)
        offsets_size = 4 * len(page)
        file.write(pack('>i', cookies_size + offsets_size + 12))
        log.debug('Page: %i size: %iB', i, cookies_size + offsets_size + 12)
    for i, page in enumerate(pages.values()):
        log.info('Writing page %d...', i + 1)
        # Page header
        file.write(pack('>i', 0x00000100))
        log.debug('cookies: %d', len(page))
        # Number of cookies -- little-endian from here on.
        file.write(pack('<i', len(page)))
        # Cookie offsets, relative to the start of the page:
        # 4B header + 4B count + 4B per offset + 4B footer.
        offset = 12 + 4 * len(page)
        for cookie in page:
            file.write(pack('<i', offset))
            offset += _cookie_size(cookie)
        # Page footer
        file.write(pack('>i', 0x00000000))
        for cookie in page:
            _write_cookie(file, cookie)


def _read_string(page, start, offset):
    '''Read a NUL-terminated UTF-8 string at ``start + offset``.'''
    page.seek(start + offset)
    raw = b''
    while True:
        ch = page.read(1)
        if ch in (b'\x00', b''):
            return raw.decode('utf-8')
        raw += ch


def _read_cookie(page):
    '''Parse one cookie record starting at the current page position.

    NOTE(review): reconstructed from the binarycookies format -- the
    original parsing code was lost past the corruption/truncation point.
    '''
    start = page.tell()
    size = unpack('<i', page.read(4))[0]
    page.read(4)                                    # unknown field
    flag = unpack('<i', page.read(4))[0]
    page.read(4)                                    # unknown field
    url_offset = unpack('<i', page.read(4))[0]
    name_offset = unpack('<i', page.read(4))[0]
    path_offset = unpack('<i', page.read(4))[0]
    value_offset = unpack('<i', page.read(4))[0]
    page.read(8)                                    # end-of-record marker
    expiry_date = unpack('<d', page.read(8))[0]
    create_date = unpack('<d', page.read(8))[0]
    return {
        'size': size,
        'flag': flag,
        'domain': _read_string(page, start, url_offset),
        'name': _read_string(page, start, name_offset),
        'path': _read_string(page, start, path_offset),
        'value': _read_string(page, start, value_offset),
        'expiry_date': expiry_date,
        'create_date': create_date,
    }


def load(file):
    '''Read a binary cookie file into a list of cookie dicts.

    Raises ValueError if the file does not start with the 'cook' magic.
    '''
    # File Magic String: cook
    magic = file.read(4)
    if magic != b'cook':
        raise ValueError('not a binarycookies file: bad magic %r' % magic)
    # Number of pages: big-endian.
    num_pages = unpack('>i', file.read(4))[0]
    # Each page size: 4 bytes * number of pages.
    page_sizes = []
    for _ in range(num_pages):
        page_sizes.append(unpack('>i', file.read(4))[0])
    # Grab individual pages; each page will contain >= one cookie.
    pages = []
    for ps in page_sizes:
        pages.append(file.read(ps))
    cookies = []
    for raw_page in pages:
        page = BytesIO(raw_page)
        # Page header: 4 bytes: always 00000100.
        page.read(4)
        # Number of cookies in each page: first 4 bytes after the header.
        num_cookies = unpack('<i', page.read(4))[0]
        # Fetch each cookie's starting offset from the page start.
        cookie_offsets = []
        for _ in range(num_cookies):
            cookie_offsets.append(unpack('<i', page.read(4))[0])
        for offset in cookie_offsets:
            page.seek(offset)
            cookies.append(_read_cookie(page))
    return cookies