
Add tool to generate fake media from the provided hashes and sizes
CosmicHorrorDev committed May 25, 2020
1 parent 05170f9 commit f079f70
Showing 3 changed files with 125 additions and 0 deletions.
13 changes: 13 additions & 0 deletions dev/README.md
@@ -0,0 +1,13 @@
## Developer tools

Hello there! This directory houses some tools that might come in handy when developing on `subwinder`.

### `fake_media.py`

This little script creates fake media files from an expected hash and size (note that it writes a file of that full size). This is useful for generating files that look like the desired media when searching with `Media`. The script takes the entries from `fake_media_entries.json` and writes the generated media to the specified output location (**Note:** this produces gigabytes of data, so take that into account). A small verification sketch follows the usage examples below.

```text
Example Usages:
./fake_media.py fake_media_entries.json
./fake_media.py --entry 0 --output-dir /tmp fake_media_entries.json
```
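
For a quick sanity check, here is a minimal sketch (not part of this commit or of `subwinder`'s API) of how a generated file could be verified against its entry. It assumes the standard OpenSubtitles hash, i.e. the file size plus the 64-bit little-endian sums of the first and last 64 KiB truncated to 64 bits, and that the generated files sit in the current directory next to `fake_media_entries.json`:

```python
import json
import os


def os_hash(path, chunk_size=64 * 1024):
    # File size plus the 64-bit little-endian sums of the first and last
    # 64 KiB, truncated to 64 bits
    size = os.path.getsize(path)
    result = size
    with open(path, "rb") as f:
        for offset in (0, size - chunk_size):
            f.seek(offset)
            for _ in range(chunk_size // 8):
                result += int.from_bytes(f.read(8), byteorder="little")
    return result & 0xFFFFFFFFFFFFFFFF


with open("fake_media_entries.json") as f:
    entries = json.load(f)

for entry in entries:
    assert os_hash(entry["name"]) == int(entry["hash"], 16), entry["name"]
```

Each generated file is all zeros past its first 8 bytes, so its hash collapses to `(size + first_8_bytes) mod 2 ** 64`, which is exactly what `fake_media.py` solves for.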
100 changes: 100 additions & 0 deletions dev/fake_media.py
@@ -0,0 +1,100 @@
#!/usr/bin/env python
import argparse
import json
from pathlib import Path


def main():
    HASH_SIZE = 8
    MAX_HASH = 2 ** (HASH_SIZE * 8) - 1
    # The hash covers the first and last 64 KiB of the file, so anything
    # smaller than 128 KiB can't be faked this way
    MIN_FILE_SIZE = 128 * 1024

    args = parse_args()

    # Generate the fake media for each of the entries
    for entry_index in args.entry:
        entry = args.entry_file[entry_index]
        output_file = args.output_dir / entry["name"]
        hash = int(entry["hash"], 16)
        size = entry["size"]

        if size < MIN_FILE_SIZE:
            raise ValueError(
                f"Desired file is below minimum filesize of {MIN_FILE_SIZE} bytes"
            )

        # Determine the value for the first 8 bytes that will fake the desired hash.
        # With everything past the first 8 bytes zeroed, the file's hash works out
        # to `(size + contents) mod 2 ** 64`, so solve for `contents`
        contents = (hash - size) % (MAX_HASH + 1)

        with output_file.open("wb") as file:
            file.write(contents.to_bytes(HASH_SIZE, byteorder="little"))

            # Write the remaining as zeros, chunked to avoid crazy RAM usage
            remaining = size - HASH_SIZE
            chunk = 16 * 1024
            while remaining > chunk:
                file.write((0).to_bytes(chunk, byteorder="little"))
                remaining -= chunk

            file.write((0).to_bytes(remaining, byteorder="little"))


def _json_file_arg(file):
    with open(file) as f:
        return json.load(f)


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-e",
        "--entry",
        action="append",
        help="[Default: all] The entry file index to generate media for",
        # Empty list is the magic value for all entries
        default=[],
    )
    parser.add_argument(
        "-o",
        "--output-dir",
        type=Path,
        help="[Default: current directory] Directory to store generated media in",
        default=Path.cwd(),
    )
    parser.add_argument(
        "entry_file", type=_json_file_arg, help="Location of the entry file"
    )

    args = parser.parse_args()

    # Deal with entries
    if len(args.entry) == 0:
        # Empty list means all entries
        args.entry = range(len(args.entry_file))
    else:
        entries = []
        for entry in args.entry:
            # Ensure entry is an int
            try:
                entry = int(entry)
            except ValueError:
                raise ValueError(f"Entry should be an int, got {entry}")

            # And within bounds
            if entry >= len(args.entry_file) or entry < 0:
                raise ValueError(
                    f"Entry {entry} extends outside entry_file bounds"
                    f" (0, {len(args.entry_file) - 1})"
                )

            entries.append(entry)
        args.entry = entries

    return args


if __name__ == "__main__":
    main()
12 changes: 12 additions & 0 deletions dev/fake_media_entries.json
@@ -0,0 +1,12 @@
[
    {
        "name": "Fringe - s04e03 - Alone in the World.dummy",
        "hash": "0x18379ac9af039390",
        "size": 366876694
    },
    {
        "name": "Night Watch (2004).dummy",
        "hash": "0x09a2c497663259cb",
        "size": 733589504
    }
]
