about summary refs log tree commit diff stats
path: root/ranger/core/metadata.py
blob: 1d0e1cd5aebe6ab9167a0d22a548d19dfba28de9 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
# This file is part of ranger, the console file manager.
# License: GNU GPL version 3, see the file "AUTHORS" for details.

"""
A Metadata Manager that reads information about files from a json database.

The database is contained in a local .metadata.json file.
"""

# TODO: Better error handling if a json file can't be decoded
# TODO: Update metadata keys if a file gets renamed/moved
# TODO: A global metadata file, maybe as a replacement for tags

import copy
import json
from os.path import basename, dirname, exists, join

from ranger.ext.openstruct import DefaultOpenStruct as ostruct


METADATA_FILE_NAME = ".metadata.json"
DEEP_SEARCH_DEFAULT = False


class MetadataManager(object):
    """Reads and writes file metadata kept in per-directory
    .metadata.json files.

    Each metafile maps a filename (full path or base name) to a dict of
    metadata key/value pairs.  Results are cached; call reset() after
    external changes to the metafiles.
    """

    def __init__(self):
        # metadata_cache maps filenames to dicts containing their metadata
        self.metadata_cache = dict()
        # metafile_cache maps .metadata.json filenames to their entries
        self.metafile_cache = dict()
        # When True, .metadata.json files of ancestor directories are
        # consulted as well, not only the file's own directory.
        self.deep_search = DEEP_SEARCH_DEFAULT

    def reset(self):
        """Flush both caches so metafiles are re-read from disk."""
        self.metadata_cache.clear()
        self.metafile_cache.clear()

    def get_metadata(self, filename):
        """Return the metadata of the given file as an ostruct.

        Returns an empty ostruct if no metadata is recorded.  The result
        is a deep copy, so mutating it does not affect the cache.
        """
        # NOTE: _get_entry consults metadata_cache itself, so no separate
        # cache lookup is needed here (the original duplicated it).
        try:
            entry = self._get_entry(filename)
        except KeyError:
            return ostruct()
        return ostruct(copy.deepcopy(entry))

    def set_metadata(self, filename, update_dict):
        """Apply update_dict to the file's metadata and write it to disk.

        A value of "" removes that key.  Without deep_search the metafile
        next to the file is used; with deep_search, the first metafile
        that already knows the file is preferred.
        """
        if self.deep_search:
            metafile = self._get_metafile_name(filename)
        else:
            metafile = next(self._get_metafile_names(filename))
        return self._set_metadata_raw(filename, update_dict, metafile)

    def _set_metadata_raw(self, filename, update_dict, metafile):
        """Update the entry for filename inside the given metafile."""
        entries = self._get_metafile_content(metafile)

        # Prefer an existing full-path entry; otherwise use (and create
        # under) the base name, which survives directory moves.
        key = filename if filename in entries else basename(filename)
        entry = entries.setdefault(key, {})
        entry.update(update_dict)

        # A value of "" means "delete this key".
        for field, value in update_dict.items():
            if value == "":
                del entry[field]

        # Remove the whole entry once it has no keys left.
        if not entry:
            del entries[key]

        # Full update of the cache, to be on the safe side:
        self.metadata_cache[filename] = entry
        self.metafile_cache[metafile] = entries

        with open(metafile, "w") as fobj:
            json.dump(entries, fobj, check_circular=True, indent=2)

    def _get_entry(self, filename):
        """Return the cached or freshly loaded metadata dict of filename.

        Raises KeyError if no applicable metafile has an entry for it.
        """
        if filename in self.metadata_cache:
            return self.metadata_cache[filename]

        # Search all applicable .metadata.json files, matching first on
        # the full path, then on the base name.
        for metafile in self._get_metafile_names(filename):
            entries = self._get_metafile_content(metafile)
            for key in (filename, basename(filename)):
                if key in entries:
                    entry = entries[key]
                    self.metadata_cache[filename] = entry
                    return entry

        raise KeyError(filename)

    def _get_metafile_content(self, metafile):
        """Return the parsed entries of metafile, caching the result.

        A missing metafile yields a fresh empty dict, which is not
        cached, so the file is re-checked on the next call.
        """
        if metafile in self.metafile_cache:
            return self.metafile_cache[metafile]
        if not exists(metafile):
            return {}
        with open(metafile, "r") as fobj:
            try:
                entries = json.load(fobj)
            except ValueError:
                raise ValueError("Failed decoding JSON file %s" % metafile)
        self.metafile_cache[metafile] = entries
        return entries

    def _get_metafile_names(self, path):
        """Yield paths of all .metadata.json files that could influence
        the metadata of the given file.

        The metafile in the file's own directory always comes first;
        with deep_search enabled, the metafiles of every ancestor
        directory follow, from deepest up to the filesystem root.
        """
        base = dirname(path)
        yield join(base, METADATA_FILE_NAME)
        if self.deep_search:
            # NOTE: splits on "/", so deep search is POSIX-only.
            parts = base.split("/")[1:]
            for depth in reversed(range(len(parts))):
                yield join("/" + "/".join(parts[:depth]), METADATA_FILE_NAME)

    def _get_metafile_name(self, filename):
        """Return the best metafile to store filename's metadata in.

        Prefers the first metafile that already has an entry for the
        file; otherwise falls back to the first candidate (the one in
        the file's own directory).
        """
        first = None
        for metafile in self._get_metafile_names(filename):
            if first is None:
                first = metafile

            entries = self._get_metafile_content(metafile)
            if filename in entries or basename(filename) in entries:
                return metafile

        # _get_metafile_names should yield >0 names, but just in case...:
        assert first is not None, "failed finding location for .metadata.json"
        return first