+++ /dev/null
-{
- "lttngv/test/test_base.py": true,
- "tests/test_base.py": true
-}
\ No newline at end of file
__pycache__/
*.py[cod]
*$py.class
-git_remote/
+runtime/
# C extensions
*.so
import os
import yaml
import logging
-import urllib.parse
+import hashlib
from git import Repo
-default_git_remote_dir = "./git_remote"
-
+import settings as Settings
def is_ref_branch(repo, ref):
try:
logger_git = logging.getLogger('setup.git')
# Fetch local base repository
-with open("config.yaml", 'r') as stream:
+with open(Settings.configuration_file, 'r') as stream:
config = yaml.load(stream)
# Validate that all declared precook dependencies are present.
def _validate_precook_deps(config):
    """Check that every 'precook_deps' entry names a defined marker.

    :param config: mapping of project name -> list of marker dicts (or None)
    :raises Exception: when a dependency references an undefined marker
    """
    # First pass: collect every marker name defined across all projects.
    defined_markers = set()
    for project, markers in config.items():
        if markers is None:
            continue
        for marker in markers:
            defined_markers.add(marker['marker'])

    # Second pass: every precook dependency must be a defined marker.
    for project, markers in config.items():
        if markers is None:
            continue
        for marker in markers:
            for dep in marker.get('precook_deps', []):
                if dep not in defined_markers:
                    raise Exception("{} is not defined".format(dep))


_validate_precook_deps(config)
+
# Retrieve all possibles remotes and clean url for path
remotes = {}
for project, markers in config.items():
continue
for marker in markers:
url = marker['url']
- url2path = urllib.parse.quote_plus(url)
- path = os.path.abspath(default_git_remote_dir + '/' + url2path)
+ url2path = hashlib.sha1(url.encode('utf8')).hexdigest()
+ path = os.path.abspath(Settings.git_remote_folder + '/' + url2path)
remotes[url] = path
logger_git.info('Remotes to be fetched {}'.format(remotes))
-if not os.path.isdir(default_git_remote_dir):
- os.mkdir(default_git_remote_dir)
+if not os.path.isdir(Settings.git_remote_folder):
+ os.makedirs(Settings.git_remote_folder)
# Fetch the remote
for url, path in remotes.items():
name = marker['marker']
ref = marker['ref']
url = marker['url']
+ if 'precook_deps' in marker:
+ deps = marker['precook_deps']
+ else:
+ deps = []
+
path = remotes[url]
repo = Repo(path)
'project': project,
'sha1': git_object.hexsha,
'url': url,
- 'path': path
+ 'path': path,
+ 'deps': deps
}
-with open('run_configuration.yaml', 'w') as run_configuration:
+with open(Settings.run_configuration_file, 'w') as run_configuration:
yaml.dump(runnable_markers, run_configuration, default_flow_style=False)
import os

# Subset of marker labels to restrict testing to; all tests are run if empty.
test_only = {"lttng-ust-2.7"}

# Base directory of this settings module; all paths are derived from it so
# the suite works regardless of the current working directory.
_base_dir = os.path.dirname(os.path.abspath(__file__))

configuration_file = os.path.join(_base_dir, "config.yaml")
run_configuration_file = os.path.join(_base_dir, "run_configuration.yaml")

projects_cache_folder = os.path.join(_base_dir, "runtime", "projects_cache")
git_remote_folder = os.path.join(_base_dir, "runtime", "git_remote")
import subprocess
import lttng_ivc.utils.ProjectFactory as ProjectFactory
-import lttng_ivc.settings as project_settings
+import lttng_ivc.settings as Settings
"""
TODO: Document how the tests are done and what they test
]
runtime_matrix_label = []
-if not project_settings.test_only:
+if not Settings.test_only:
runtime_matrix_label = test_matrix_label
else:
for tup in test_matrix_label:
ust_label, tools_label = tup[0], tup[1]
- if (ust_label in project_settings.test_only or tools_label in
- project_settings.test_only):
+ if (ust_label in Settings.test_only or tools_label in
+ Settings.test_only):
runtime_matrix_label.append(tup)
ust.autobuild()
- tools.dependencies.append(ust)
+ tools.dependencies['custom-ust'] = ust
# TODO: Propose fixes to upstream regarding the check
if not should_pass:
# Making sure we get a error here
@pytest.mark.parametrize("ust_label,tools_label,base_tools_ust_dep,should_pass", runtime_matrix_label)
def test_soname_build(tmpdir, ust_label, tools_label, base_tools_ust_dep, should_pass):
- ust = ProjectFactory.get(ust_label, str(tmpdir.mkdir("lttng-ust")))
- tools = ProjectFactory.get(tools_label, str(tmpdir.mkdir("lttng-tools")))
- ust_configure_mockup = ProjectFactory.get(ust_label, str(tmpdir.mkdir("lttng-ust-base")))
+ ust = ProjectFactory.get_fresh(ust_label, str(tmpdir.mkdir("lttng-ust")))
+ tools = ProjectFactory.get_fresh(tools_label,
+ str(tmpdir.mkdir("lttng-tools")))
+ ust_configure_mockup = ProjectFactory.get_fresh(ust_label,
+ str(tmpdir.mkdir("lttng-ust-base")))
ust.autobuild()
ust_configure_mockup.autobuild()
# Fool configure
- tools.dependencies.append(ust_configure_mockup)
+ tools.dependencies['custom-ust'] = ust_configure_mockup
tools.configure()
# Use ust under test
import os
import logging
import yaml
+import pickle
import lttng_ivc.utils.project as Project
+import lttng_ivc.settings as Settings
_logger = logging.getLogger('project.factory')
-_conf_file = os.path.dirname(os.path.abspath(__file__)) + "/../run_configuration.yaml"
_project_constructor = {
'babeltrace': Project.Babeltrace,
'lttng-modules': Project.Lttng_modules,
'lttng-ust': Project.Lttng_ust,
}
+__projects_cache = {}
+
# Run configuration, loaded once at import time.
_markers = None
with open(Settings.run_configuration_file, 'r') as stream:
    # This is voluntarily static across calls, no need to perform this
    # every time.
    # NOTE(review): safe_load avoids arbitrary object construction; the run
    # configuration only contains plain scalars and mappings.
    _markers = yaml.safe_load(stream)
-def get(label, tmpdir):
+def get_fresh(label, tmpdir):
if label not in _markers:
# TODO: specialized exception, handle it caller-side so the caller
# can decide to skip or fail test.
path = marker['path']
sha1 = marker['sha1']
return constructor(label, path, sha1, tmpdir)
+
+
def _validate_pickle(cached, label):
    """Check that a cached (unpickled) project still matches the run
    configuration for *label*.

    The label, sha1 and the whole dependency tree of the cached object are
    compared recursively against ``_markers``.

    :param cached: unpickled project object (has label/sha1/dependencies)
    :param label: marker label to validate against
    :returns: True when the cached object is still valid, False otherwise
    """
    # Renamed parameter (was ``pickle``): it shadowed the pickle module.
    _logger.debug("Checking validate for {} {}".format(cached, label))
    if cached.label != label:
        _logger.warning("Label {} and {} are not the same".format(cached.label,
                        label))
        return False
    if cached.sha1 != _markers[label]['sha1']:
        _logger.warning("Sha1 {} and {} are not the same".format(cached.sha1,
                        _markers[label]['sha1']))
        return False

    deps = _markers[label]['deps']
    if len(deps) != len(cached.dependencies):
        _logger.warning("Len {} and {} are not the same".format(len(deps),
                        len(cached.dependencies)))
        return False
    for dep in deps:
        if dep not in cached.dependencies:
            _logger.warning("Dep {} is not in {}".format(dep,
                            cached.dependencies))
            return False
        # Recursively validate each dependency subtree.
        _logger.debug("Calling validate {} {}".format(cached.dependencies[dep],
                      dep))
        if not _validate_pickle(cached.dependencies[dep], dep):
            return False
    return True
+
+
def get_precook(label):
    """
    Retrieve a precooked immutable project from a cache if present,
    otherwise the project is built, installed and cached for future access.

    :param label: marker label from the run configuration
    :raises Exception: when the label is not present in the configuration
    """
    if label not in _markers:
        # TODO: specialized exception, handle it caller-side so the caller
        # can decide to skip or fail test.
        raise Exception('Label is not present')
    marker = _markers[label]
    constructor = _project_constructor[marker['project']]
    path = marker['path']
    sha1 = marker['sha1']
    deps = marker['deps']

    # Cache path for the label
    cache_path = os.path.join(Settings.projects_cache_folder, label)
    pickle_path = os.path.join(cache_path, label + ".pickle")

    # Check if a pickled project is present and valid. If so return it asap.
    # NOTE(review): pickle.load on the local cache file is trusted input
    # (written by us below); do not point this at external data.
    if os.path.exists(pickle_path):
        with open(pickle_path, 'rb') as pickle_file:
            pickled = pickle.load(pickle_file)
        if _validate_pickle(pickled, label):
            return pickled
        # Stale cache entry: drop its artifacts and rebuild from scratch.
        pickled.cleanup()
        _logger.warning("Pickle for {} is invalid. Rebuilding".format(label))

    project = constructor(label, path, sha1, cache_path)

    # Precooked dependencies are themselves fetched (or built) recursively.
    for dep in deps:
        project.dependencies[dep] = get_precook(dep)

    project.autobuild()
    # Freeze the object: cached projects must never be rebuilt or mutated.
    project._immutable = True
    with open(pickle_path, 'wb') as pickle_file:
        pickle.dump(project, pickle_file)

    return project
import subprocess
import logging
+_logger = logging.getLogger('project')
class Project(object):
self.custom_configure_flags.append("CXX={} g++".format(ccache))
""" A collection of Project dependencies """
- self.dependencies = []
+ self.dependencies = {}
+ self._immutable = False
# State
- self.isCheckedOut = False
- self.isBootStrapped = False
self.isBuilt = False
self.isConfigured = False
self.isInstalled = False
- self.source_path = tmpdir + "/source"
- self.installation_path = tmpdir + "/install"
+ self.basedir = tmpdir
+ self.log_path = os.path.join(tmpdir, "log")
+ self.source_path = os.path.join(tmpdir, "source")
+ self.installation_path = os.path.join(tmpdir, "install")
+
+ os.makedirs(self.log_path)
os.makedirs(self.source_path)
os.makedirs(self.installation_path)
- self.logger = logging.getLogger('project.{}'.format(self.label))
self.special_env_variables = {}
for var, value in self.special_env_variables.items():
if var in env:
- # TODO: WARNING log point
# Raise for now since no special cases is known
- self.logger.warning("Special var % is already defined", var)
+ _logger.warning("% Special var % is already defined",
+ self.label, var)
raise Exception("Multiple definition of a special environment variable")
else:
env[var] = value
- for dep in self.dependencies:
+ for key, dep in self.dependencies.items():
# Extra space just in case
cpp_flags += " {}".format(dep.get_cppflags())
ld_flags += " {}".format(dep.get_ldflags())
ld_library_path += "{}:".format(dep.get_ld_library_path())
for var, value in dep.special_env_variables.items():
if var in env:
- # TODO: WARNING log point
# Raise for now since no special cases is known
- self.logger.warning("Special var % is already defined", var)
+ _logger.warning("% Special var % is already defined",
+ self.label, var)
raise Exception("Multiple definition of a special environment variable")
else:
env[var] = value
- # TODO: INFO log point for each variable with project information
if cpp_flags:
if 'CPPFLAGS' in env:
cpp_flags = env['CPPFLAGS'] + cpp_flags
env['CPPFLAGS'] = cpp_flags
+ _logger.debug("% CPPFLAGS= %s", self.label, cpp_flags)
if ld_flags:
if 'LDFLAGS' in env:
ld_flags = env['LDFLAGS'] + ld_flags
env['LDFLAGS'] = ld_flags
+ _logger.debug("% LDFLAGS= %s", self.label, ld_flags)
if ld_library_path:
if 'LD_LIBRARY_PATH' in env:
ld_library_path = env['LD_LIBRARY_PATH'] + ":" + ld_library_path
env['LD_LIBRARY_PATH'] = ld_library_path
+ _logger.debug("% LD_LIBRARY_PATH= %s", self.label, ld_library_path)
return env
def autobuild(self):
Perform the bootstrap, configuration, build and install the
project. Build dependencies if not already built
"""
- for dep in self.dependencies:
+ if (self.isConfigured and self.isBuilt and self.isInstalled):
+ return
+
+ if self._immutable:
+ raise Exception("Object is immutable. Illegal autobuild")
+
+ for key, dep in self.dependencies.items():
dep.autobuild()
- if self.isCheckedOut ^ self.isBootStrapped ^ self.isBootStrapped ^ self.isBuilt ^ self.isConfigured ^ self.isInstalled:
+ if self.isConfigured ^ self.isBuilt ^ self.isInstalled:
raise Exception("Project steps where manually triggered. Can't autobuild")
+ _logger.debug("% Autobuild configure", self.label)
self.configure()
+ _logger.debug("% Autobuild build", self.label)
self.build()
+ _logger.debug("% Autobuild install", self.label)
self.install()
def checkout(self):
+ if self._immutable:
+ raise Exception("Object is immutable. Illegal checkout")
+
repo = git.Repo.clone_from(self.git_path, self.source_path)
commit = repo.commit(self.sha1)
repo.head.reference = commit
Bootstrap the project. Raise subprocess.CalledProcessError on
bootstrap error.
"""
+ if self._immutable:
+ raise Exception("Object is immutable. Illegal bootstrap")
+
+ out = os.path.join(self.log_path, "bootstrap.out")
+ err = os.path.join(self.log_path, "bootstrap.err")
+
os.chdir(self.source_path)
- p = subprocess.run(['./bootstrap'], stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
+ with open(out, 'w') as stdout, open(err, 'w') as stderr:
+ p = subprocess.run(['./bootstrap'], stdout=stdout, stderr=stderr)
p.check_returncode()
return p
Configure the project.
Raises subprocess.CalledProcessError on configure error
"""
+ if self._immutable:
+ raise Exception("Object is immutable. Illegal configure")
+
# Check that all our dependencies were actually installed
- for dep in self.dependencies:
+ for key, dep in self.dependencies.items():
if not dep.isInstalled:
# TODO: Custom exception here Dependency Error
raise Exception("Dependency project flagged as not installed")
+ out = os.path.join(self.log_path, "configure.out")
+ err = os.path.join(self.log_path, "configure.err")
+
os.chdir(self.source_path)
args = ['./configure']
prefix = '--prefix={}'.format(self.installation_path)
args.extend(self.custom_configure_flags)
# TODO: log output and add INFO log point
- p = subprocess.run(args, env=self.get_env(), stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
+ with open(out, 'w') as stdout, open(err, 'w') as stderr:
+ p = subprocess.run(args, env=self.get_env(), stdout=stdout,
+ stderr=stderr)
p.check_returncode()
self.isConfigured = True
return p
Build the project. Raise subprocess.CalledProcessError on build
error.
"""
+ if self._immutable:
+ raise Exception("Object is immutable. Illegal build")
+
+ out = os.path.join(self.log_path, "build.out")
+ err = os.path.join(self.log_path, "build.err")
+
os.chdir(self.source_path)
args = ['make']
env = self.get_env()
args.append(num_cpu)
# TODO: log output and add INFO log point with args
- p = subprocess.run(args, env=env, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
+ with open(out, 'w') as stdout, open(err, 'w') as stderr:
+ p = subprocess.run(args, env=env, stdout=stdout,
+ stderr=stderr)
p.check_returncode()
self.isBuilt = True
return p
Install the project. Raise subprocess.CalledProcessError on
install error
"""
+ if self._immutable:
+ raise Exception("Object is immutable. Illegal install")
+
+ out = os.path.join(self.log_path, "build.out")
+ err = os.path.join(self.log_path, "build.err")
+
os.chdir(self.source_path)
args = ['make', 'install']
# TODO: log output and add INFO log point
- p = subprocess.run(args, env=self.get_env(), stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
+ with open(out, 'w') as stdout, open(err, 'w') as stderr:
+ p = subprocess.run(args, env=self.get_env(), stdout=stdout,
+ stderr=stderr)
p.check_returncode()
self.isInstalled = True
return p
pass
def install(self):
+ if self._immutable:
+ raise Exception("Object is immutable. Illegal install")
os.chdir(self.source_path)
args = ['make', 'INSTALL_MOD_PATH={}'.format(self.installation_path),
'modules_install']