path: root/doc/tools/convert_papermode_to_metadata.py
blob: a1d6372d051e043e61f0902fa523482bf6dd5442
#!/usr/bin/env python
"""
usage: ./convert_papermode_to_metadata.py

This script converts the .paperinfo CSV file in the current directory to an
equivalent .metadata.json file.

ranger used to store metadata in .paperinfo files, but that format was rather
limited, so .metadata.json files were introduced.
"""

import csv
import json
import os
import sys

# raw_input() was renamed to input() in Python 3
if sys.version_info[0] < 3:
    getuserinput = raw_input
else:
    getuserinput = input

FIELDS = ["name", "year", "title", "authors", "url"]

def replace(source, target):
    """Convert the CSV file `source` into the JSON file `target`."""
    if not os.path.exists(source):
        print("Source file `%s' doesn't exist, skipping." % source)
        return

    # Ask for user confirmation if the target file already exists
    if os.path.exists(target):
        sys.stdout.write("Warning: target file `%s' exists! Overwrite? [y/N] "
                         % target)
        sys.stdout.flush()
        userinput = getuserinput()
        if not (userinput.startswith("y") or userinput.startswith("Y")):
            print("Skipping file `%s'" % source)
            return

    result = dict()

    # Read the input file and convert it to a dictionary
    with open(".paperinfo", "r") as infile:
        reader = csv.reader(infile, skipinitialspace=True)
        for lineno, row in enumerate(reader):
            if len(row) != len(FIELDS):
                print("skipping invalid row `%s' on line %d" % (row, lineno))
                continue
            name = row[0]
            entry = {}

            # Filling up the resulting entry dict
            for i, column in enumerate(row[1:]):
                if column:
                    entry[FIELDS[i + 1]] = column

            # Adding the dict if it isn't empty
            if entry:
                result[name] = entry

    # Write the obtained dictionary into the target file
    if result:
        with open(".metadata.json", "w") as outfile:
            json.dump(result, outfile, indent=2)
    else:
        print("Skipping writing `%s' due to a lack of data" % target)

if __name__ == "__main__":
    if set(['--help', '-h']) & set(sys.argv[1:]):
        print(__doc__.strip())
    else:
        replace(".paperinfo", ".metadata.json")
pan>" ); } :(before "End Op ff Subops") case 2: { // call function pointer at r/m32 trace(Callstack_depth+1, "run") << "call to r/m32" << end(); const int32_t* offset = effective_address(modrm); push(EIP); EIP = *offset; trace(Callstack_depth+1, "run") << "jumping to 0x" << HEXWORD << EIP << end(); ++Callstack_depth; break; } :(code) void test_call_mem_at_rm32() { Mem.push_back(vma(0xbd000000)); // manually allocate memory Reg[ESP].u = 0xbd000064; Reg[EBX].u = 0x2000; run( "== code 0x1\n" // op ModR/M SIB displacement immediate " ff 13 \n" // call function offset at *EBX // next EIP is 3 "== data 0x2000\n" "a0 00 00 00\n" // 0xa0 ); CHECK_TRACE_CONTENTS( "run: call to r/m32\n" "run: effective address is 0x00002000 (EBX)\n" "run: decrementing ESP to 0xbd000060\n" "run: pushing value 0x00000003\n" "run: jumping to 0x000000a0\n" ); } //:: ret :(before "End Initialize Op Names") put_new(Name, "c3", "return from most recent unfinished call (ret)"); :(code) void test_ret() { Mem.push_back(vma(0xbd000000)); // manually allocate memory Reg[ESP].u = 0xbd000064; write_mem_u32(Reg[ESP].u, 0x10); run( "== code 0x1\n" // op ModR/M SIB displacement immediate " c3 \n" // return "== data 0x2000\n" "10 00 00 00\n" // 0x10 ); CHECK_TRACE_CONTENTS( "run: return\n" "run: popping value 0x00000010\n" "run: jumping to 0x00000010\n" ); } :(before "End Single-Byte Opcodes") case 0xc3: { // return from a call trace(Callstack_depth+1, "run") << "return" << end(); --Callstack_depth; EIP = pop(); trace(Callstack_depth+1, "run") << "jumping to 0x" << HEXWORD << EIP << end(); break; }