gstreamer/docs/gst-plugins-doc-cache-generator.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright © 2018 Thibault Saunier <tsaunier@igalia.com>
#
# This library is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
import argparse
import json
import os
import sys
import re
import subprocess
import tempfile
from collections import OrderedDict
try:
    from collections.abc import Mapping
except ImportError:  # python <3.3
    from collections import Mapping
# Marks values in the json file as "unstable" so that they are
# not updated automatically; this aims at making the cache file
# stable and handling corner cases where we can't automatically
# make it happen. For properties, the best way is to use the
# GST_PARAM_DOC_SHOW_DEFAULT flag.
UNSTABLE_VALUE = "unstable-values"
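# Illustrative example (hypothetical values, not produced by this script): a cache
# entry such as
#   {"long-name": "Hand-tuned name", "unstable-values": ["long-name"]}
# keeps "long-name" from being overwritten on regeneration, because
# dict_recursive_update() below skips keys listed under "unstable-values".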
def dict_recursive_update(d, u):
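    # Recursively merge `u` into `d`, leaving keys listed under d's
    # "unstable-values" entry untouched; returns True when anything outside
    # that unstable set was written.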
    modified = False
    unstable_values = d.get(UNSTABLE_VALUE, [])
    if not isinstance(unstable_values, list):
        unstable_values = [unstable_values]
    for k, v in u.items():
        if isinstance(v, Mapping):
            r = d.get(k, {})
            modified |= dict_recursive_update(r, v)
            d[k] = r
        elif k not in unstable_values:
            modified = True
            if k == "package":
                d[k] = re.sub(" git$| source release$| prerelease$", "", v)
            else:
                d[k] = u[k]
    return modified
def test_unstable_values():
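    # Minimal self-checks for dict_recursive_update(); the test_ prefix makes this
    # discoverable by pytest-style test runners.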
    current_cache = { "v1": "yes", "unstable-values": "v1"}
    new_cache = { "v1": "no" }
    assert(dict_recursive_update(current_cache, new_cache) == False)
    new_cache = { "v1": "no", "unstable-values": "v2" }
    assert(dict_recursive_update(current_cache, new_cache) == True)
    current_cache = { "v1": "yes", "v2": "yay", "unstable-values": "v1"}
    new_cache = { "v1": "no" }
    assert(dict_recursive_update(current_cache, new_cache) == False)
    current_cache = { "v1": "yes", "v2": "yay", "unstable-values": "v2"}
    new_cache = { "v1": "no", "v2": "unstable" }
    assert (dict_recursive_update(current_cache, new_cache) == True)
    assert (current_cache == { "v1": "no", "v2": "yay", "unstable-values": "v2" })
if __name__ == "__main__":
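    # argv: <cache file> <output file> [<plugin file>...]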
    cache_filename = sys.argv[1]
    output_filename = sys.argv[2]
    build_root = os.environ.get('MESON_BUILD_ROOT', '')
    subenv = os.environ.copy()
    cache = {}
    try:
        with open(cache_filename, newline='\n', encoding='utf8') as f:
            cache = json.load(f)
    except FileNotFoundError:
        pass
    out = output_filename + '.tmp'
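    # gst-hotdoc-plugins-scanner is expected to sit next to this script; its first
    # argument is the JSON file to write, the remaining ones are plugins to introspect.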
    cmd = [os.path.join(os.path.dirname(os.path.realpath(__file__)), 'gst-hotdoc-plugins-scanner'), out]
    gst_plugins_paths = []
    for plugin_path in sys.argv[3:]:
        cmd.append(plugin_path)
        gst_plugins_paths.append(os.path.dirname(plugin_path))
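    # GstPluginsPath.json, if present in the build root, lists additional plugin
    # directories to expose through GST_PLUGIN_PATH; a missing file simply means none.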
    try:
        with open(os.path.join(build_root, 'GstPluginsPath.json'), newline='\n', encoding='utf8') as f:
            plugin_paths = os.pathsep.join(json.load(f))
    except FileNotFoundError:
        plugin_paths = ""
    if plugin_paths:
        subenv['GST_PLUGIN_PATH'] = subenv.get('GST_PLUGIN_PATH', '') + ':' + plugin_paths
    # Hide stderr unless an actual error happens as we have cases where we get g_warnings
    # and other issues because plugins are being built while `gst_init` is called
    stderrlogfile = output_filename + '.stderr'
    with open(stderrlogfile, 'w', encoding='utf8') as log:
        try:
            data = subprocess.check_output(cmd, env=subenv, stderr=log, encoding='utf8', universal_newlines=True)
        except subprocess.CalledProcessError as e:
            log.flush()
            with open(stderrlogfile, 'r', encoding='utf8') as f:
                print(f.read(), file=sys.stderr)
            raise
    with open(out, 'r', newline='\n', encoding='utf8') as jfile:
        try:
            plugins = json.load(jfile, object_pairs_hook=OrderedDict)
        except json.decoder.JSONDecodeError:
            print("Could not decode:\n%s" % jfile.read(), file=sys.stderr)
            raise
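    # Merge the freshly scanned data into the cached description; entries marked
    # as "unstable-values" in the cache keep their existing values.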
    modified = dict_recursive_update(cache, plugins)
    with open(output_filename, 'w', newline='\n', encoding='utf8') as f:
        json.dump(cache, f, indent=4, sort_keys=True, ensure_ascii=False)
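    # Only rewrite the cache file itself when the scan actually changed something.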
    if modified:
        with open(cache_filename, 'w', newline='\n', encoding='utf8') as f:
            json.dump(cache, f, indent=4, sort_keys=True, ensure_ascii=False)