Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git
meson: Add explicit check: kwarg to all run_command() calls
This is required since Meson 0.61.0; omitting it causes a warning to be emitted:
2c079d855e
https://github.com/mesonbuild/meson/issues/9300
This exposed a bunch of places where we had broken run_command()
calls, unnecessary run_command() calls, and places where check: true
should be used.
Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/1507>
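For context, the check: kwarg decides whether a failing run_command() aborts the configure step. A minimal sketch of the two spellings used throughout this change (the program name is made up for illustration):

    # Illustrative only: 'sometool' is not a real dependency here.
    sometool = find_program('sometool')

    # check: true: Meson itself aborts configuration if the command fails.
    res = run_command(sometool, '--version', check: true)
    message('sometool version: ' + res.stdout().strip())

    # check: false: failure is not fatal, the build file inspects the result.
    probe = run_command(sometool, '--optional-feature', check: false)
    if probe.returncode() != 0
      message('optional feature not available')
    endif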
parent 945fd11907
commit 1be6d6ccf5
33 changed files with 97 additions and 184 deletions
meson.build (10 changed lines)
@@ -24,13 +24,13 @@ ensure_not_uninstalled = '''
 import os
 assert('GST_ENV' not in os.environ)
 '''
-cmdres = run_command(python3, '-c', ensure_not_uninstalled)
+cmdres = run_command(python3, '-c', ensure_not_uninstalled, check: false)
 if cmdres.returncode() != 0
   error('Do not run `ninja` or `meson` for gst-build inside the uninstalled environment, you will run into problems')
 endif

 # Install gst-indent pre-commit hook
-run_command(python3, '-c', 'import shutil; shutil.copy("scripts/git-hooks/multi-pre-commit.hook", ".git/hooks/pre-commit")')
+run_command(python3, '-c', 'import shutil; shutil.copy("scripts/git-hooks/multi-pre-commit.hook", ".git/hooks/pre-commit")', check: false)

 # Ensure that the user does not have Strawberry Perl in PATH, since it ships
 # with a pkg-config.bat and broken pkgconfig files for libffi and zlib. Will
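The hunk above already handles returncode() explicitly, so check: false simply spells out the behaviour it relied on before. A condensed sketch of that probe pattern (the Python one-liner is a stand-in):

    # Non-fatal probe: configuration continues, and the caller decides what a
    # non-zero exit status means.
    cmdres = run_command(python3, '-c', 'import sys', check: false)
    if cmdres.returncode() != 0
      error('probe failed: ' + cmdres.stderr())
    endif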
@@ -41,7 +41,7 @@ import os
 assert(r'Strawberry\perl\bin' not in os.environ['PATH'])
 '''
 if build_system == 'windows' and meson.version().version_compare('<0.60.0')
-  cmdres = run_command(python3, '-c', ensure_no_strawberry_perl)
+  cmdres = run_command(python3, '-c', ensure_no_strawberry_perl, check: false)
   if cmdres.returncode() != 0
     error('You have Strawberry Perl in PATH which is known to cause build issues with Meson < 0.60.0. Please remove it from PATH, uninstall it, or upgrade Meson.')
   endif
@@ -53,9 +53,9 @@ documented_projects = ''
 if not meson.is_subproject() and cc.get_id() == 'msvc'
   uname = find_program('uname', required: false)
   if uname.found()
-    ret = run_command(uname, '-o')
+    ret = run_command(uname, '-o', check: false)
     if ret.returncode() == 0 and ret.stdout().to_lower() == 'msys'
-      ret = run_command(uname, '-r')
+      ret = run_command(uname, '-r', check: false)
       # The kernel version returned by uname is actually the msys version
       if ret.returncode() == 0 and ret.stdout().startswith('2')
         # If a system zlib is found, disable UNIX features in zlib.h and zconf.h
@@ -78,8 +78,7 @@ icondir = join_paths(get_option('datadir'), 'icons/hicolor')
 subdir('data')


-if run_command(python3,
-    '-c', 'import gi; gi.require_version("Gtk", "3.0")').returncode() == 0
+if run_command(python3, '-c', 'import gi; gi.require_version("Gtk", "3.0")', check: false).returncode() == 0
   test('gst-debug-viewer', python3, args: ['-m', 'unittest'],
        workdir: meson.current_source_dir())
 endif
@@ -172,5 +172,3 @@ if not get_option('debug_viewer').disabled()
   subdir('debug-viewer')
 endif
 subdir('docs')
-
-run_command(python3, '-c', 'import shutil; shutil.copy("hooks/multi-pre-commit.hook", ".git/hooks/pre-commit")')
@@ -9,7 +9,7 @@ if not hotdoc_p.found()
 endif

 hotdoc_req = '>= 0.12.2'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
 if not hotdoc_version.version_compare(hotdoc_req)
   error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
 endif
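The hotdoc hunks all follow the same shape: probe a tool's version without aborting, then let version_compare() decide. A generic sketch with an illustrative tool name and requirement:

    tool = find_program('sometool', required: false)
    if tool.found()
      # Non-fatal probe; the stdout of '--version' is compared afterwards.
      tool_version = run_command(tool, '--version', check: false).stdout().strip()
      if not tool_version.version_compare('>= 1.0')
        message('sometool >= 1.0 not found, feature disabled')
      endif
    endif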
@@ -49,6 +49,7 @@ with open(fname, 'r') as f:
   python3,
   '-c', read_file_contents,
   fname,
+  check: false,
 )
 if cmdres.returncode() == 0
   built_subprojects = cmdres.stdout().strip()
@@ -55,7 +55,7 @@ if not hotdoc_p.found()
 endif

 hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
 if not hotdoc_version.version_compare(hotdoc_req)
   if get_option('doc').enabled()
     error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
@@ -118,12 +118,8 @@ import json

 with open("@0@") as f:
     print(':'.join(json.load(f).keys()), end='')
-'''.format(plugins_cache))
-  assert(list_plugin_res.returncode() == 0,
-    'Could not list plugins from @0@\n@1@\n@1@'.format(plugins_cache, list_plugin_res.stdout(), list_plugin_res.stderr()))
-
-
+'''.format(plugins_cache),
+  check: true)
 foreach plugin_name: list_plugin_res.stdout().split(':')
   plugins_doc += [hotdoc.generate_doc(plugin_name,
     project_version: apiversion,
@@ -174,18 +174,17 @@ elif build_gir
   if not cc.compiles('#include <Python.h>', dependencies: [python_dep])
     error_msg = 'Could not compile a simple program against python'
   elif pylib_loc == ''
-    check_path_exists = 'import os, sys; assert(os.path.exists(sys.argv[1]))'
+    fsmod = import('fs')
     pylib_loc = python.get_variable('LIBPL', '')
     if host_machine.system() != 'windows' and host_machine.system() != 'darwin'
       pylib_ldlibrary = python.get_variable('LDLIBRARY', '')
-      if run_command(python, '-c', check_path_exists, join_paths(pylib_loc, pylib_ldlibrary)).returncode() != 0
+      if not fsmod.exists(pylib_loc / pylib_ldlibrary)
         # Workaround for Fedora
         pylib_loc = python.get_variable('LIBDIR', '')
         message('pylib_loc = @0@'.format(pylib_loc))
       endif

-      res = run_command(python, '-c', check_path_exists, join_paths(pylib_loc, pylib_ldlibrary))
-      if res.returncode() != 0
+      if not fsmod.exists(pylib_loc / pylib_ldlibrary)
         error_msg = '@0@ doesn\' exist, can\'t use python'.format(join_paths(pylib_loc, pylib_ldlibrary))
       endif
     endif
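The two Python-bindings hunks replace a run_command() existence probe with Meson's fs module, which checks the path without spawning a process. A small sketch (the path is illustrative):

    fsmod = import('fs')
    candidate = '/usr/lib' / 'libpython3.so'   # illustrative path, not a real probe
    if not fsmod.exists(candidate)
      message('library not found at the expected location')
    endif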
@@ -310,7 +309,7 @@ else:
 '''
 pygi_override_dir = get_option('pygi-overrides-dir')
 if pygi_override_dir == ''
-  cres = run_command(python3, '-c', override_detector, get_option('prefix'))
+  cres = run_command(python3, '-c', override_detector, get_option('prefix'), check: false)
   if cres.returncode() == 0
     pygi_override_dir = cres.stdout().strip()
   endif
@@ -327,15 +326,10 @@ endif
 # Set release date
 if gst_version_nano == 0
   extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
-  run_result = run_command(extract_release_date, gst_version, files('gst-editing-services.doap'))
-  if run_result.returncode() == 0
-    release_date = run_result.stdout().strip()
-    cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
-    message('Package release date: ' + release_date)
-  else
-    # Error out if our release can't be found in the .doap file
-    error(run_result.stderr())
-  endif
+  run_result = run_command(extract_release_date, gst_version, files('gst-editing-services.doap'), check: true)
+  release_date = run_result.stdout().strip()
+  cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+  message('Package release date: ' + release_date)
 endif

 if gio_dep.version().version_compare('< 2.67.4')
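The release-date hunks lean on check: true for error reporting: if the extraction script exits non-zero, Meson aborts configuration pointing at the failed command, so the old returncode()/else/error() branch is redundant. A compressed sketch with an illustrative helper script:

    gen = find_program('generate-something.py')   # illustrative helper, not in the tree
    # A non-zero exit fails the configure step on its own.
    result = run_command(gen, 'some-argument', check: true)
    message('generated value: ' + result.stdout().strip())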
@@ -3,7 +3,7 @@ tests = [
 ]

 check_test = find_program ('configure_test_check.py')
-have_webrtc_check_deps = run_command (check_test).returncode() == 0
+have_webrtc_check_deps = run_command (check_test, check: false).returncode() == 0

 if openssl.found() and have_webrtc_check_deps
   test_deps = [certs]
@@ -8,19 +8,16 @@ nuget = find_program('nuget.py')
 dependencies = []
 foreach dependency, version: { 'Newtonsoft.Json': '11.0.2', 'WebSocketSharp': '1.0.3-rc11'}
   message('Getting @0@:@1@'.format(dependency, version))
-  get_dep= run_command(nuget, 'get',
+  get_dep = run_command(nuget, 'get',
     '--builddir', dependency,
     '--nuget-name', dependency,
     '--nuget-version', version,
     '--csharp-version=net45',
     '--current-builddir', meson.current_build_dir(),
     '--builddir', meson.global_build_root(), # FIXME: --builddir specified twice?!
+    check: true,
   )

-  if get_dep.returncode() != 0
-    error('Failed to get @0@-@1@: @2@'.format(dependency, version, get_dep.stderr()))
-  endif
-
   link_args = get_dep.stdout().split()
   dependencies += [declare_dependency(link_args: link_args, version: version)]
   foreach path: get_dep.stdout().split()
@@ -37,7 +37,7 @@ if not hotdoc_p.found()
 endif

 hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
 if not hotdoc_version.version_compare(hotdoc_req)
   if get_option('doc').enabled()
     error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
@@ -207,15 +207,10 @@ subdir('tests')
 # Set release date
 if gst_version_nano == 0
   extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
-  run_result = run_command(extract_release_date, gst_version, files('gst-libav.doap'))
-  if run_result.returncode() == 0
-    release_date = run_result.stdout().strip()
-    cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
-    message('Package release date: ' + release_date)
-  else
-    # Error out if our release can't be found in the .doap file
-    error(run_result.stderr())
-  endif
+  run_result = run_command(extract_release_date, gst_version, files('gst-libav.doap'), check: true)
+  release_date = run_result.stdout().strip()
+  cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+  message('Package release date: ' + release_date)
 endif

 configure_file(output: 'config.h', configuration: cdata)
@@ -415,15 +415,10 @@ subdir('docs')
 # Set release date
 if gst_version_nano == 0
   extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
-  run_result = run_command(extract_release_date, gst_version, files('gst-omx.doap'))
-  if run_result.returncode() == 0
-    release_date = run_result.stdout().strip()
-    cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
-    message('Package release date: ' + release_date)
-  else
-    # Error out if our release can't be found in the .doap file
-    error(run_result.stderr())
-  endif
+  run_result = run_command(extract_release_date, gst_version, files('gst-omx.doap'), check: true)
+  release_date = run_result.stdout().strip()
+  cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+  message('Package release date: ' + release_date)
 endif

 configure_file(output: 'config.h', configuration: cdata)
@@ -57,7 +57,7 @@ if not hotdoc_p.found()
 endif

 hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
 if not hotdoc_version.version_compare(hotdoc_req)
   if get_option('doc').enabled()
     error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
@@ -215,10 +215,8 @@ import json

 with open("@0@") as f:
     print(':'.join(json.load(f).keys()), end='')
-'''.format(plugins_cache))
-  assert(list_plugin_res.returncode() == 0,
-    'Could not list plugins from @0@'.format(plugins_cache))
-
+'''.format(plugins_cache),
+  check: true)

 plugins_doc = []
 foreach plugin_name: list_plugin_res.stdout().split(':')
@@ -89,15 +89,15 @@ if opencv_found

   # Check the data dir used by opencv for its xml data files
   # Use prefix from pkg-config to be compatible with cross-compilation
-  r = run_command('test', '-d', opencv_prefix + '/share/opencv')
+  r = run_command('test', '-d', opencv_prefix + '/share/opencv', check: false)
   if r.returncode() == 0
     gstopencv_cargs += '-DOPENCV_PATH_NAME="opencv"'
   else
-    r = run_command('test', '-d', opencv_prefix + '/share/OpenCV')
+    r = run_command('test', '-d', opencv_prefix + '/share/OpenCV', check: false)
     if r.returncode() == 0
       gstopencv_cargs += '-DOPENCV_PATH_NAME="OpenCV"'
     else
-      r = run_command('test', '-d', opencv_prefix + '/share/opencv4')
+      r = run_command('test', '-d', opencv_prefix + '/share/opencv4', check: false)
       if r.returncode() == 0
         gstopencv_cargs += '-DOPENCV_PATH_NAME="opencv4"'
       else
@@ -110,7 +110,7 @@ if ['ios', 'darwin'].contains(host_system)
   # https://github.com/KhronosGroup/MoltenVK/issues/492
   vulkan_dep = cc.find_library('MoltenVK', required : get_option('vulkan'))
 elif host_system == 'windows'
-  vulkan_root = run_command(python3, '-c', 'import os; print(os.environ.get("VK_SDK_PATH"))').stdout().strip()
+  vulkan_root = run_command(python3, '-c', 'import os; print(os.environ.get("VK_SDK_PATH"))', check: false).stdout().strip()
   if vulkan_root != '' and vulkan_root != 'None'
     vulkan_lib_dir = ''
     if build_machine.cpu_family() == 'x86_64'
@@ -536,15 +536,10 @@ subdir('scripts')
 # Set release date
 if gst_version_nano == 0
   extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
-  run_result = run_command(extract_release_date, gst_version, files('gst-plugins-bad.doap'))
-  if run_result.returncode() == 0
-    release_date = run_result.stdout().strip()
-    cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
-    message('Package release date: ' + release_date)
-  else
-    # Error out if our release can't be found in the .doap file
-    error(run_result.stderr())
-  endif
+  run_result = run_command(extract_release_date, gst_version, files('gst-plugins-bad.doap'), check: true)
+  release_date = run_result.stdout().strip()
+  cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+  message('Package release date: ' + release_date)
 endif

 if glib_dep.version().version_compare('< 2.67.4')
@@ -51,7 +51,7 @@ if mfx_api != 'oneVPL'
   use_msdk = true
 else
   # Old versions of MediaSDK don't provide a pkg-config file
-  mfx_root = run_command(python3, '-c', 'import os; print(os.environ.get("INTELMEDIASDKROOT", os.environ.get("MFX_HOME", "")))').stdout().strip()
+  mfx_root = run_command(python3, '-c', 'import os; print(os.environ.get("INTELMEDIASDKROOT", os.environ.get("MFX_HOME", "")))', check: false).stdout().strip()

   if mfx_root != ''
     mfx_libdir = [mfx_root + '/lib/lin_x64', mfx_root + '/lib/x64', mfx_root + '/lib64', mfx_root + '/lib']
@@ -58,7 +58,7 @@ if not hotdoc_p.found()
 endif

 hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
 if not hotdoc_version.version_compare(hotdoc_req)
   if get_option('doc').enabled()
     error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
@@ -202,12 +202,8 @@ import json

 with open("@0@") as f:
     print(':'.join(json.load(f).keys()), end='')
-'''.format(plugins_cache))
-  assert(list_plugin_res.returncode() == 0,
-    'Could not list plugins from @0@\n@1@\n@1@'.format(plugins_cache, list_plugin_res.stdout(), list_plugin_res.stderr()))
-
-
+'''.format(plugins_cache),
+  check: true)
 foreach plugin_name: list_plugin_res.stdout().split(':')
   plugins_doc += [hotdoc.generate_doc(plugin_name,
     project_version: api_version,
@@ -524,15 +524,10 @@ endif
 # Set release date
 if gst_version_nano == 0
   extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
-  run_result = run_command(extract_release_date, gst_version, files('gst-plugins-base.doap'))
-  if run_result.returncode() == 0
-    release_date = run_result.stdout().strip()
-    core_conf.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
-    message('Package release date: ' + release_date)
-  else
-    # Error out if our release can't be found in the .doap file
-    error(run_result.stderr())
-  endif
+  run_result = run_command(extract_release_date, gst_version, files('gst-plugins-base.doap'), check: true)
+  release_date = run_result.stdout().strip()
+  core_conf.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+  message('Package release date: ' + release_date)
 endif

 if gio_dep.version().version_compare('< 2.67.4')
@@ -39,7 +39,7 @@ if not hotdoc_p.found()
 endif

 hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
 if not hotdoc_version.version_compare(hotdoc_req)
   if get_option('doc').enabled()
     error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
@@ -83,11 +83,8 @@ import json

 with open("@0@") as f:
     print(':'.join(json.load(f).keys()), end='')
-'''.format(plugins_cache))
-  assert(list_plugin_res.returncode() == 0,
-    'Could not list plugins from @0@'.format(plugins_cache))
-
-
+'''.format(plugins_cache),
+  check: true)
 foreach plugin_name: list_plugin_res.stdout().split(':')
   plugins_doc += [hotdoc.generate_doc(plugin_name,
     project_version: api_version,
@@ -390,7 +390,7 @@ if host_cpu == 'x86_64'
   if nasm.found()
     # We can't use the version: kwarg for find_program because old versions
    # of nasm don't support --version
-    ret = run_command(nasm, '-v')
+    ret = run_command(nasm, '-v', check: false)
     if ret.returncode() == 0
       nasm_version = ret.stdout().strip().split()[2]
       nasm_req = '>=2.13'
@@ -489,15 +489,10 @@ subdir('scripts')
 # Set release date
 if gst_version_nano == 0
   extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
-  run_result = run_command(extract_release_date, gst_version, files('gst-plugins-good.doap'))
-  if run_result.returncode() == 0
-    release_date = run_result.stdout().strip()
-    cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
-    message('Package release date: ' + release_date)
-  else
-    # Error out if our release can't be found in the .doap file
-    error(run_result.stderr())
-  endif
+  run_result = run_command(extract_release_date, gst_version, files('gst-plugins-good.doap'), check: true)
+  release_date = run_result.stdout().strip()
+  cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+  message('Package release date: ' + release_date)
 endif

 if gio_dep.version().version_compare('< 2.67.4')
@@ -39,7 +39,7 @@ if not hotdoc_p.found()
 endif

 hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
 if not hotdoc_version.version_compare(hotdoc_req)
   if get_option('doc').enabled()
     error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
@@ -84,11 +84,8 @@ import json

 with open("@0@") as f:
     print(':'.join(json.load(f).keys()), end='')
-'''.format(plugins_cache))
-  assert(list_plugin_res.returncode() == 0,
-    'Could not list plugins from @0@'.format(plugins_cache))
-
-
+'''.format(plugins_cache),
+  check: true)
 foreach plugin_name: list_plugin_res.stdout().split(':')
   plugins_doc += [hotdoc.generate_doc(plugin_name,
     project_version: api_version,
@@ -296,15 +296,10 @@ subdir('scripts')
 # Set release date
 if gst_version_nano == 0
   extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
-  run_result = run_command(extract_release_date, gst_version, files('gst-plugins-ugly.doap'))
-  if run_result.returncode() == 0
-    release_date = run_result.stdout().strip()
-    cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
-    message('Package release date: ' + release_date)
-  else
-    # Error out if our release can't be found in the .doap file
-    error(run_result.stderr())
-  endif
+  run_result = run_command(extract_release_date, gst_version, files('gst-plugins-ugly.doap'), check: true)
+  release_date = run_result.stdout().strip()
+  cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+  message('Package release date: ' + release_date)
 endif

 configure_file(output : 'config.h', configuration : cdata)
@@ -34,20 +34,19 @@ python_dep = python.dependency(embed:true, required : true)
 python_abi_flags = python.get_variable('ABIFLAGS', '')
 pylib_loc = get_option('libpython-dir')
 if pylib_loc == ''
-  check_path_exists = 'import os, sys; assert(os.path.exists(sys.argv[1]))'
+  fsmod = import('fs')
   pylib_loc = python.get_variable('LIBPL', '')
   if host_machine.system() != 'windows' and host_machine.system() != 'darwin'
     pylib_ldlibrary = python.get_variable('LDLIBRARY', '')
-    if run_command(python, '-c', check_path_exists, join_paths(pylib_loc, pylib_ldlibrary)).returncode() != 0
+    if not fsmod.exists(pylib_loc / pylib_ldlibrary)
       # Workaround for Fedora
       pylib_loc = python.get_variable('LIBDIR', '')
       message('pylib_loc = @0@'.format(pylib_loc))
     endif

-    assert(
-      run_command(python, '-c', check_path_exists, join_paths(pylib_loc, pylib_ldlibrary)).returncode() == 0,
-      'Python dynamic library path could not be determined'
-    )
+    if not fsmod.exists(pylib_loc / pylib_ldlibrary)
+      error('Python dynamic library path could not be determined')
+    endif
   endif
 endif
@@ -10,21 +10,14 @@ pluginsdirs = []
 if not meson.is_subproject()
   pkgconfig = find_program('pkg-config')
   runcmd = run_command(pkgconfig, '--variable=pluginsdir',
-                       'gstreamer-' + api_version)
-  if runcmd.returncode() == 0
-    pluginsdirs = runcmd.stdout().split()
-  else
-    error('Could not determine GStreamer core plugins directory for unit tests.')
-  endif
+                       'gstreamer-' + api_version, check: true)
+  pluginsdirs = runcmd.stdout().split()
 endif

 runcmd = run_command(python, '-c', '''with open("@0@/mesonconfig.py", "w") as f:
     f.write("path='@1@'")'''.format(
-    join_paths(meson.current_build_dir()), join_paths(meson.current_build_dir(), '..')))
-
-if runcmd.returncode() != 0
-  error('Could not configure testsuite config file.' + runcmd.stderr())
-endif
+    join_paths(meson.current_build_dir()), join_paths(meson.current_build_dir(), '..')),
+    check: true)

 pluginsdirs = []
 if gst_dep.type_name() == 'pkgconfig'
@@ -57,7 +57,7 @@ if not hotdoc_p.found()
 endif

 hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
 if not hotdoc_version.version_compare(hotdoc_req)
   if get_option('doc').enabled()
     error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
@@ -208,15 +208,10 @@ subdir('docs')
 # Set release date
 if gst_version_nano == 0
   extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
-  run_result = run_command(extract_release_date, gst_version, files('gst-rtsp-server.doap'))
-  if run_result.returncode() == 0
-    release_date = run_result.stdout().strip()
-    cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
-    message('Package release date: ' + release_date)
-  else
-    # Error out if our release can't be found in the .doap file
-    error(run_result.stderr())
-  endif
+  run_result = run_command(extract_release_date, gst_version, files('gst-rtsp-server.doap'), check: true)
+  release_date = run_result.stdout().strip()
+  cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+  message('Package release date: ' + release_date)
 endif

 configure_file(output: 'config.h', configuration: cdata)
@@ -13,22 +13,19 @@ if nunit_console.found()
     '--csharp-version=net45',
     '--current-builddir', meson.current_build_dir(),
     '--builddir', meson.build_root(), # FIXME: --builddir specified twice?!
+    check: true,
   )


   nunit_mono_path = []
   nunit_dep = dependency('mono-nunit', required: false, version: ['>=2.6', '< 2.7'])
   if not nunit_dep.found()
-    if get_nunit_res.returncode() != 0
-      message('Failed to get NUnit: ' + get_nunit_res.stderr())
-    else
-      foreach path: get_nunit_res.stdout().split()
-        nunit_mono_path += [join_paths(meson.build_root(), path.strip('-r:'), '..')]
-      endforeach
+    foreach path: get_nunit_res.stdout().split()
+      nunit_mono_path += [meson.build_root() / path.strip('-r:') / '..']
+    endforeach

     nunit_dep = declare_dependency(link_args: get_nunit_res.stdout().split(),
       version: nunit_version)
-    endif
   endif

   if nunit_mono_path.length() > 0
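The gstreamer-sharp hunk above also swaps join_paths() for the '/' string path-join operator; both build the same path (segments here are illustrative):

    a = join_paths('builddir', 'sub', '..')
    b = 'builddir' / 'sub' / '..'
    # a and b are both 'builddir/sub/..'
    message(a)
    message(b)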
@@ -37,7 +37,7 @@ if not hotdoc_p.found()
 endif

 hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
 if not hotdoc_version.version_compare(hotdoc_req)
   if get_option('doc').enabled()
     error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
@@ -204,15 +204,10 @@ subdir('docs')
 # Set release date
 if gst_version_nano == 0
   extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
-  run_result = run_command(extract_release_date, gst_version, files('gstreamer-vaapi.doap'))
-  if run_result.returncode() == 0
-    release_date = run_result.stdout().strip()
-    cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
-    message('Package release date: ' + release_date)
-  else
-    # Error out if our release can't be found in the .doap file
-    error(run_result.stderr())
-  endif
+  run_result = run_command(extract_release_date, gst_version, files('gstreamer-vaapi.doap'), check: true)
+  release_date = run_result.stdout().strip()
+  cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+  message('Package release date: ' + release_date)
 endif

 if gmodule_dep.version().version_compare('< 2.67.4')
@@ -42,7 +42,7 @@ if not hotdoc_p.found()
 endif

 hotdoc_req = '>= 0.11.0'
-hotdoc_version = run_command(hotdoc_p, '--version').stdout()
+hotdoc_version = run_command(hotdoc_p, '--version', check: false).stdout()
 if not hotdoc_version.version_compare(hotdoc_req)
   if get_option('doc').enabled()
     error('Hotdoc version @0@ not found, got @1@'.format(hotdoc_req, hotdoc_version))
@@ -33,11 +33,7 @@ bison_cdata = configuration_data()
 bison_min_version='2.4'
 bison = find_program('bison', 'win_bison')

-bversion_res = run_command([bison, '--version'])
-if bversion_res.returncode() != 0
-  error('Could not get bison version (@0@)'.format(bversion_res.stderr()))
-endif
-
+bversion_res = run_command([bison, '--version'], check: true)
 bversion = bversion_res.stdout().split('\n')[0].split(' ')[-1].strip()
 if bversion.version_compare('<' + bison_min_version)
   error('bison version @0@ >= @1@: NO'.format(bversion, bison_min_version))
@@ -608,15 +608,10 @@ subdir('scripts')
 # Set release date
 if gst_version_nano == 0
   extract_release_date = find_program('scripts/extract-release-date-from-doap-file.py')
-  run_result = run_command(extract_release_date, gst_version, files('gstreamer.doap'))
-  if run_result.returncode() == 0
-    release_date = run_result.stdout().strip()
-    cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
-    message('Package release date: ' + release_date)
-  else
-    # Error out if our release can't be found in the .doap file
-    error(run_result.stderr())
-  endif
+  run_result = run_command(extract_release_date, gst_version, files('gstreamer.doap'), check: true)
+  release_date = run_result.stdout().strip()
+  cdata.set_quoted('GST_PACKAGE_RELEASE_DATETIME', release_date)
+  message('Package release date: ' + release_date)
 endif

 configure_file(output : 'config.h', configuration : cdata)
@@ -8,7 +8,7 @@ endif
 root_rel = '../..'
 python = import('python').find_installation()

-if run_command(python, '-c', 'import gi').returncode() != 0
+if run_command(python, '-c', 'import gi', check: false).returncode() != 0
   message('PyGObject not found, not running PyGObject tests')
   subdir_done()
 endif