LB CONFIG API  LB 4.x
api.py
27 import subprocess
28 import os
29 import core
30 import sys
31 import json
32 import StringIO
33 import types
34 import re
35 
36 logicblox_dep = (
37  "logicblox", {'default_path': "$LOGICBLOX_HOME",
38  'help': "The LogicBlox installation to use for this build."})
39 lb_web_dep = (
40  "lb_web", {'default_path': "$LB_WEBSERVER_HOME",
41  'help': "The lb-web-server installation to use for this build."})
42 
43 def lb_deployment_dir():
44  d = os.environ.get('LB_DEPLOYMENT_HOME')
45  if not d:
46  d = os.path.expanduser('~/lb_deployment')
47  return d
48 
49 core.variable('LB_DEPLOYMENT_HOME', lb_deployment_dir())
50 
51 def depends_on(*deps, **more_deps):
52  def to_dict(s):
53  if isinstance(s, basestring):
54  return {'default_path': s}
55  else:
56  return s
57 
58  dependencies = {}
59  for k, v in deps:
60  dependencies[k] = to_dict(v)
61  for k, v in more_deps.iteritems():
62  dependencies[k] = to_dict(v)
63  core.add_dependencies(dependencies)
64 
65 def with_arg(*args, **kwargs):
66  core.add_arg(args, kwargs)
67 
68 
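## Usage sketch (illustrative; not part of api.py): declaring build
## dependencies from a config script. logicblox_dep and lb_web_dep are the
## tuples defined above; 'my_tool' is a hypothetical extra dependency.
#
#   depends_on(logicblox_dep, lb_web_dep,
#              my_tool={'default_path': '$MY_TOOL_HOME',
#                       'help': 'The my-tool installation to use.'})
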
82 def lbconfig_package(name, default_prefix='out', version='', default_targets=['lb-libraries']):
83  core.set_default_prefix(default_prefix)
84  rule('all', default_targets)
85  if version:
86  variable('version', version)
87  variable('package_basename', '%s' % name)
88  variable('package_name', '%s-$(version)' % name)
89  else:
90  variable('package_basename', '%s' % name)
91  variable('package_name', name)
92 
93 
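## Usage sketch (illustrative; not part of api.py): a minimal config script,
## assuming this module is importable as lbconfig.api (hypothetical path).
## lbconfig_package sets the package name and the default 'make' targets.
#
#   from lbconfig.api import *
#   lbconfig_package('my-app', version='0.1',
#                    default_targets=['lb-libraries', 'jars'])
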
94 
96 def emit(line, makefile=None):
97  print 'emit is deprecated.'
98  core.emit(line, makefile)
99 
100 
101 
112 def variable(name, value):
113  core.variable(name, value)
114 
115 
116 
137 def rule(output, input, commands=None, phony=False, description=None):
138  core.rule(output, input, commands, phony, description)
139 
140 
141 
153 def target(name, deps):
154  rule(name, deps, phony=True)
155 
156 
157 
164 def add_task(task):
165  if not type(task.run) is types.MethodType:
166  print "Task %s does not have a run() method"%task
167  sys.exit(1)
168  core.g_tasks.append(task)
169 
170 
175 def check_lb_library(**kwargs):
176  kwargs['install'] = False
177  lb_library(**kwargs)
178 
179 
180 
217 def lb_library(name, srcdir, srcgen=[], deps=None, install=True, generated=False, scalable=None, install_subdir='share/$(package_basename)', post_cmds=[]):
218  if scalable is not None:
219  core.deprecation_alert("Argument 'scalable' to lb_library is no longer relevant and is ignored")
220 
221  if name in core.g_projects:
222  raise core.ConfigureError("Library %s already declared." % name)
223  project = Project(name, srcdir, srcgen, deps, install, generated, install_subdir, post_cmds)
224  core.g_projects[name] = project
225 
226 
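## Usage sketch (illustrative; not part of api.py): compiling a LogicBlox
## library. The project file is expected at <srcdir>/<name>.project, with ':'
## in the name mapped to '_'; all names and paths below are hypothetical.
#
#   lb_library(name='my_lib', srcdir='src')
#   # deps may map an external library name to its compiled location:
#   lb_library(name='my_app', srcdir='src/app',
#              deps={'external_lib': '$(lb_web)/share'})
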
227 
273 def check_lb_workspace(name, libraries=[], native_libs=[], archived=False, init_cmds=[], input=[], compress=True, create_cmd=None, keep=False):
274  if name in core.g_workspaces:
275  raise core.ConfigureError("Workspace %s already declared." % name)
276  workspace = Workspace(name, libraries, native_libs, archived, init_cmds, input, compress, create_cmd, keep)
277  core.g_workspaces[name] = workspace
278 
279 
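## Usage sketch (illustrative; not part of api.py): building a workspace for
## tests that installs the library declared above (hypothetical names).
#
#   check_lb_workspace('test_ws', libraries=['my_lib'])
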
336 def check_lb_workspace_with_branch(workspace, branch, libraries=[], branch_libraries=[], native_libs=[], archived=False, init_cmds=[], input=[], compress=True, create_cmd=None, keep=False):
337  if workspace in core.g_workspaces:
338  raise core.ConfigureError("Workspace %s already declared." % workspace)
339 
340  branch_obj = Branch(name=branch, libraries=branch_libraries)
341  ws = Workspace(workspace, libraries, native_libs, archived, init_cmds, input, compress, create_cmd, keep, branches=[branch_obj])
342  core.g_workspaces[workspace] = ws
343 
344 
345 
346 
385 def ws_archive(name, libraries=[], init_cmds=[], input=[], compress=True, keep=False):
386  check_lb_workspace(name, libraries, archived=True, init_cmds=init_cmds, input=input, compress=compress, keep=keep)
387 
388 
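## Usage sketch (illustrative; not part of api.py): exporting a workspace to a
## compressed archive while keeping the original (hypothetical names).
#
#   ws_archive('seed_ws', libraries=['my_lib'], keep=True)
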
389 
405 def import_ws_archive(name, ws_prefix=''):
406  archive_names = core.get_ws_archive_names(name)
407  ws_archive = archive_names[2]
408  commands = [
409  'mkdir -p $(build)/ws_tmp/' + name,
410  'cd $(build)/ws_tmp/' + name,
411  'tar xzf ' + ws_archive,
412  'cd ..',
413  '$(lb) import-workspace --overwrite ' + ws_prefix + name + ' ' + name,
414  'rm -rf ' + name]
415 
416  install_test_ws = 'install-test-' + name
417  rule(
418  output=install_test_ws,
419  input=ws_archive,
420  commands='&&'.join(commands),
421  phony=True,
422  description='Testing installing workspace ' + name
423  )
424 
425  rule(
426  output='install-test',
427  input=install_test_ws,
428  phony=True
429  )
430 
431 
432 
436 def check_protobuf_protocol(**kwargs):
437  kwargs['install'] = False
438  protobuf_protocol(**kwargs)
439 
440 
441 
457 def python_protobuf( name,
458  srcdir,
459  package='',
460  proto_paths=None,
461  install=True):
462  protobuf_protocol(
463  name=name, package=package, srcdir=srcdir,
464  proto_paths=proto_paths, gen_datalog=False,
465  gen_java=False, gen_python=True, install=install)
466 
467 
468 
487 def java_protobuf( name,
488  srcdir,
489  java_package,
490  package='',
491  proto_paths=None,
492  install=True,
493  outer_class=None):
494  protobuf_protocol(
495  name=name, package=package, srcdir=srcdir, proto_paths=proto_paths,
496  gen_datalog=False, gen_java=True, gen_python=False, outer_class=outer_class,
497  java_package=java_package, install=install)
498 
499 
500 
531 def protobuf_protocol( name,
532  srcdir,
533  package='',
534  java_package=None,
535  lifetime='transaction',
536  proto_path='',
537  gen_datalog=True,
538  gen_java=True,
539  outer_class=None,
540  gen_python=True,
541  install=True,
542  proto_paths=None):
543  if proto_paths is None:
544  proto_paths = []
545  proto_paths.append('.')
546  proto_paths.append('$(logicblox)/lib/protobuf')
547  if (proto_path is not None) and (proto_path != ''):
548  proto_paths.append(proto_path)
549 
550  if java_package is None:
551  gen_java = False
552  else:
553  java_pkgdir = java_package.replace('.', '/')
554 
555  pkgdir = package.replace('.', '/')
556  pkgprefix = core.protobuf_pkg_prefix(package)
557 
558  rel_proto_file = pkgprefix + name + '.proto'
559  proto_file = srcdir + '/' + pkgprefix + name + '.proto'
560 
561  # Rule for descriptor file and _proto.logic file
562  proto_path_opt = ''
563  for p in proto_paths:
564  proto_path_opt = proto_path_opt + ' --proto_path ' + p
565 
566  if gen_datalog:
567  logic_file = srcdir + '/' + pkgprefix + name + '_proto.logic'
568  rel_logic_file = pkgprefix + name + '_proto.logic'
569  descriptor_file = srcdir + '/' + pkgprefix + name + '.descriptor'
570  rel_descriptor_file = pkgprefix + name + '.descriptor'
571 
572  rule([descriptor_file, logic_file], proto_file,
573  ['mkdir -p ' + os.path.dirname(logic_file),
574  'cd ' + srcdir +
575  ' && LOGICBLOX_HOME=$(logicblox) '
576  ' $(proto2datalog) ' +
577  rel_proto_file + ' ' +
578  rel_descriptor_file +
579  ' -file ' +
580  rel_logic_file +
581  ' -lifetime ' + lifetime + proto_path_opt],
582  description='Compiling protobuf protocol %s to datalog' % proto_file)
583 
584  emit_clean_file(logic_file)
585  emit_clean_file(descriptor_file)
586 
587  if gen_python:
588  py_file = python_protobuf_file(name, package)
589  python_module_dir = os.path.dirname(py_file)
590  rule(
591  output=[py_file, python_module_dir],
592  input=proto_file,
593  commands=[
594  'mkdir -p %s' % python_module_dir,
595  'touch %s/__init__.py' % python_module_dir,
596  'cd %s && $(protoc) %s --python_out $(build)/python %s' % (srcdir, rel_proto_file, proto_path_opt)
597  ],
598  description='Compiling protobuf protocol %s to python' % proto_file)
599  rule(
600  output='python-libraries',
601  input=python_module_dir
602  )
603  core.emit_clean_dir('$(build)/python/' + pkgdir)
604  depfile = python_protobuf_file(name, package)
605  rule(output='python_protobufs', input=depfile, phony=True)
606 
607  if gen_java:
608  java_build_dir = core.get_java_protobuf_build_dir(name)
609  java_file = java_protobuf_file(name, java_package, outer_class)
610 
611  rule(
612  output=[java_file, java_build_dir],
613  input=proto_file,
614  commands=[
615  'mkdir -p %s' % java_build_dir,
616  'cd %s && $(protoc) %s --java_out %s %s' % (srcdir, rel_proto_file, java_build_dir, proto_path_opt)
617  ],
618  description='Compiling protobuf protocol %s to java' % proto_file)
619 
620  core.emit_clean_dir('$(build)/java/' + java_pkgdir)
621  depfile = java_protobuf_file(name, java_package, outer_class)
622  rule(output='java_protobufs', input=depfile, phony=True)
623 
624  if install:
625  install_file(proto_file, 'lib/protobuf/' + pkgdir)
626 
627  dist_files([proto_file])
628 
629 
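## Usage sketch (illustrative; not part of api.py): generating datalog, Java
## and Python bindings for a .proto file under proto/ (hypothetical layout).
#
#   protobuf_protocol(name='message', srcdir='proto', package='my.pkg',
#                     java_package='com.example.proto')
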
630 
639 def python_protobuf_file(name, package):
640  return '$(build)/python/%s_pb2.py' % (core.protobuf_pkg_prefix(package) + name)
641 
642 
653 def java_protobuf_file(name, java_package = None, outer_class = None):
654  if java_package is None:
655  return ''
656 
657  java_pkgdir = java_package.replace('.', '/')
658  title_name = name.title().replace('_', '') if(outer_class is None) else outer_class
659  return core.get_java_protobuf_build_dir(name) + '/%s/%s.java' % (java_pkgdir, title_name)
660 
661 
662 
715 def check_jar( name,
716  main,
717  srcdir=None,
718  srcdirs=[],
719  classpath=[],
720  scala=False,
721  scala_files=None,
722  java_files=None,
723  srcgen=[],
724  deps=[],
725  javadoc=None,
726  scaladoc=None,
727  manual_targets=None,
728  workspaces=[],
729  jvm_args=[],
730  java_version="1.8",
731  javac='javac',
732  javac_flags="",
733  scalac='scalac',
734  scalac_flags="",
735  resources=[],
736  services=[],
737  container_target='check',
738  findbugs=False):
739  if manual_targets is None:
740  manual_targets = {}
741  jar(name, srcdir, srcdirs, classpath, srcgen, javadoc, scaladoc,
742  deps=deps, install=False, scala=scala, scala_files=scala_files, java_files=java_files, java_version=java_version, resources=resources,
743  javac=javac, javac_flags=javac_flags, scalac=scalac, scalac_flags=scalac_flags, findbugs=findbugs)
744 
745  jar_file = '$(build)/jars/' + name + '.jar'
746 
747  local_deps = ['$(build)/jars/' + d + '.jar' for d in deps]
748  classpath = classpath + local_deps
749 
750  inputs = [jar_file]
751 
752  for service_name in services:
753  inputs.append(core.service_started_file(service_name))
754 
755  for ws in workspaces:
756  inputs.append(core.check_lb_workspace_target(ws))
757 
758  # TODO - this call to make is a hack because check_jar does not support proper setup and teardown of services, like check_command.
759  # We need to refactor check_jar to use check_command, or at least extract the setup/teardown code from check_command.
760  commands = ['java %s -cp %s:%s %s $(testcase)' % (' '.join(jvm_args), ':'.join(classpath), jar_file, main)]
761  for service_name in services:
762  commands.append('make stop-service-' + service_name)
763 
764  rule(
765  output='check-' + name,
766  input=inputs,
767  commands=commands,
768  description='Testing %s class of jar %s' % (main, name))
769 
770  rule(
771  output=container_target,
772  input=['check-' + name])
773 
774  for key, target in manual_targets.iteritems():
775  rule(
776  output='run-' + key,
777  input=inputs,
778  commands=[
779  'java %s -cp %s:%s %s' %
780  (' '.join(jvm_args), ':'.join(classpath), jar_file, target)],
781  description='Testing %s class of jar %s' % (target, name))
782 
783 
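## Usage sketch (illustrative; not part of api.py): a test jar whose main
## class is run by 'make check-my-app-tests' (hypothetical names).
#
#   check_jar(name='my-app-tests', main='com.example.TestMain',
#             srcdir='test/java', deps=['my-app'], workspaces=['test_ws'])
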
792 def service_jar(name, srcdir = None, srcdirs = None, classpath = None, srcgen = None, javadoc = None, scaladoc = None, resources = None,
793  deps = None, install = True, java_files = None, java_version = "1.8",
794  protocols = None, findbugs=False):
795 
796  if srcdirs is None:
797  srcdirs = []
798  if srcgen is None:
799  srcgen = []
800  if classpath is None:
801  classpath = []
802  if protocols is None:
803  protocols = []
804  if resources is None:
805  resources = {}
806  if deps is None:
807  deps = []
808 
809  # add all bloxweb jars automatically to classpath
810  bloxweb_cp = core.get_bloxweb_classpath()
811  for p in bloxweb_cp:
812  classpath.append(p)
813 
814  # generate java files from protocols and include generated files in the jar
815  for proto_file in protocols:
816  proto_pkg = core.get_java_package_from_proto(proto_file)
817  pieces = os.path.split(proto_file)
818  proto_src = pieces[0]
819  proto_name = os.path.splitext(pieces[1])[0]
820  protobuf_protocol(name=proto_name, srcdir=proto_src,
821  java_package=proto_pkg, gen_java=True, gen_python=False,
822  gen_datalog=False, install=install)
823 
824  jname = proto_name.title() + '.java'
825  jname = jname.replace('_', '')
826  pkg_path = proto_pkg.replace('.', '/')
827  srcgen.append(core.get_java_protobuf_build_dir(proto_name) + '/' + pkg_path + '/' + jname)
828 
829  jar(name=name, srcdir=srcdir, srcdirs=srcdirs, classpath=classpath,
830  srcgen=srcgen, javadoc=javadoc, scaladoc=scaladoc, resources=resources, deps=deps,
831  install=install, java_files=java_files, java_version=java_version, findbugs=findbugs)
832 
833 
834 
894 def jar(name, srcdir=None,
895  srcdirs=[],
896  classpath=[],
897  srcgen=[],
898  javadoc=None,
899  scaladoc=None,
900  resources=[],
901  deps=[],
902  install=True,
903  scala=False,
904  scala_files=None,
905  java_files=None,
906  java_version="1.8",
907  manifest=None,
908  javac='javac',
909  scalac='scalac',
910  scalac_flags="",
911  javac_flags="",
912  sbt=False,
913  findbugs=False):
914  if srcdir is not None:
915  srcdirs = [srcdir] + srcdirs
916 
917  if java_files is None:
918  java_files = []
919  for d in srcdirs:
920  java_files.extend(core.find_files(d, '.java'))
921 
922  if scala_files is not None:
923  scala = True
924  else:
925  scala_files = []
926  if scala:
927  for d in srcdirs:
928  scala_files.extend(core.find_files(d, '.scala'))
929 
930  if isinstance(resources, list):
931  resourceKeys = resources
932  resources = {}
933  for f in resourceKeys:
934  resources[f] = os.path.basename(f)
935  else:
936  resourceKeys = resources.keys()
937 
938  dist_files(java_files)
939  dist_files(scala_files)
940  dist_files(resourceKeys)
941  java_files.extend(srcgen)
942 
943  jar_file = '$(build)/jars/' + name + '.jar'
944  classes_dir = '$(build)/jars/' + name + '.classes'
945 
946  local_deps = ['$(build)/jars/' + d + '.jar' for d in deps]
947  classpath = classpath + local_deps
948 
949  commands = ['mkdir -p ' + classes_dir]
950 
951  if (not sbt):
952  if scala:
953  cp = '-classpath %s' % ':'.join(classpath) if classpath else ''
954  commands.append(
955  '%s %s -d %s %s %s' % # do we wind up compiling java files twice?
956  (scalac, scalac_flags, classes_dir, cp, ' '.join(java_files + scala_files)))
957 
958  if java_files:
959  combined = classpath + [classes_dir]
960  cp = '-cp %s' % ':'.join(combined) if combined else ''
961  commands.append(
962  '%s %s -d %s -source %s -target %s %s %s' %
963  (javac, javac_flags, classes_dir, java_version, java_version, cp, ' '.join(java_files)))
964  else:
965  sbt_srcs = ','.join( ("file(\\\"%s\\\")" if (d[0:1] == "/" or d[0:1]=="$") else "baseDirectory.value / \\\"%s\\\"") % d
966  for d in (java_files + scala_files))
967  sbt_jars = ','.join("file(\\\"%s\\\")" % d for d in classpath)
968  scalac_flags_list = scalac_flags.split(" ")
969  javac_flags_list = javac_flags.split(" ")
970  scalac_flags_str = ",".join("\\\"%s\\\"" % f for f in scalac_flags_list if f[0:2] != "-D" )
971  javac_flags_str = ",".join("\\\"%s\\\"" % f for f in javac_flags_list if f[0:2] != "-D" )
972  java_opts = " ".join( [f for f in scalac_flags_list if f[0:2] == "-D"]
973  + [f for f in javac_flags_list if f[0:2] == "-D"])
974  commands.append("echo \"unmanagedSources in Compile := List( %s )\" > build.sbt" % sbt_srcs)
975  commands.append("echo >> build.sbt")
976  commands.append("echo \"unmanagedJars in Compile := List (%s).map(Attributed.blank)\" >> build.sbt" % sbt_jars)
977  commands.append("echo >> build.sbt")
978  commands.append("echo \"target := file(\\\"$(build)\\\")\" >> build.sbt")
979  commands.append("echo >> build.sbt")
980  commands.append("echo \"classDirectory in Compile := file (\\\"%s\\\")\" >> build.sbt" % classes_dir)
981  commands.append("echo >> build.sbt")
982  commands.append("echo \"scalacOptions in Compile := List (%s)\" >> build.sbt" % scalac_flags_str)
983  commands.append("echo >> build.sbt")
984  commands.append("echo \"javacOptions in Compile := List (%s)\" >> build.sbt" % javac_flags_str)
985  commands.append("JAVA_OPTS=\"%s\" sbt compile" % java_opts)
986 
987  for f in resources:
988  commands.append('mkdir -p %s/%s' % (classes_dir, os.path.dirname(resources[f])))
989  commands.append('cp -a %s %s/%s' % (f, classes_dir, resources[f]))
990 
991  manifest_deps = []
992  if manifest is not None:
993  manifest_file = '%s/Manifest.txt' % classes_dir
994  manifest_deps = [manifest_file]
995  core.create_manifest(manifest, name, classes_dir, classpath, java_files + scala_files + resourceKeys + local_deps + classpath)
996 
997  commands.append('(cd %s; jar cfm ../%s.jar Manifest.txt .)' % (classes_dir, name))
998  else:
999  commands.append('(cd %s; jar cf ../%s.jar .)' % (classes_dir, name))
1000 
1001  # cleanup files starting with @ from the dependencies, as they indicate a file that contains
1002  # a list of source files
1003  input_files = [re.sub('^@', '', x) for x in (java_files + scala_files + resourceKeys + local_deps + classpath + manifest_deps)]
1004  rule(
1005  output=jar_file,
1006  input=input_files,
1007  commands=commands,
1008  description='Compiling jar ' + name)
1009 
1010  emit_clean_file(jar_file)
1011  core.emit_clean_dir(classes_dir)
1012 
1013  rule(
1014  output='jars',
1015  input=jar_file,
1016  phony=True)
1017 
1018  rule(
1019  output='jar-' + name,
1020  input=jar_file,
1021  phony=True)
1022 
1023  if install:
1024  install_file(jar_file, 'lib/java')
1025 
1026  if javadoc is not None:
1027  javadoc_dir = '$(build)/javadoc/' + name
1028  rule(
1029  output=javadoc_dir,
1030  input=java_files,
1031  commands=[
1032  'javadoc -classpath %s:%s -windowtitle "%s" -doctitle "%s" -link "%s" -link "%s" -link "%s" -public -d %s %s' %
1033  (
1034  ':'.join(classpath),
1035  classes_dir,
1036  javadoc['title'],
1037  javadoc['title'],
1038  'http://docs.oracle.com/javase/8/docs/api/',
1039  'https://google.github.io/guava/releases/15.0/api/docs/',
1040  'http://docs.amazonwebservices.com/AWSJavaSDK/latest/javadoc/',
1041  javadoc_dir,
1042  ' '.join(java_files))
1043  ],
1044  description='Generating javadoc for ' + name)
1045 
1046  core.emit_clean_dir(javadoc_dir)
1047  if install:
1048  install_dir(javadoc_dir, 'docs/api/' + name)
1049 
1050  if scaladoc is not None:
1051  scaladoc_dir = '$(build)/scaladoc/' + name
1052  rule(
1053  output=scaladoc_dir,
1054  input=scala_files,
1055  commands=['mkdir -p %s' % (scaladoc_dir),
1056  'scaladoc -classpath %s:%s -implicits -doc-title "%s" -d %s %s' %
1057  (
1058  ':'.join(classpath),
1059  classes_dir,
1060  scaladoc['title'],
1061  scaladoc_dir,
1062  ' '.join(scala_files))
1063  ],
1064  description='Generating scaladoc for ' + name)
1065 
1066  core.emit_clean_dir(scaladoc_dir)
1067  if install:
1068  install_dir(scaladoc_dir, 'docs/scala-api/' + name)
1069 
1070  if findbugs:
1071  findbugs_html = '$(build)/' + name + '-findbugs.html'
1072  rule(
1073  output='findbugs',
1074  input=[],
1075  commands=[
1076  'fb analyze -sourcepath %s -auxclasspath %s -effort:max -exclude findbugs-exclude-filter.xml -html -output %s %s' %
1077  (':'.join(srcdirs), ':'.join(classpath), findbugs_html, jar_file)
1078  ],
1079  description='Generating findbugs report for ' + name)
1080 
1081 
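## Usage sketch (illustrative; not part of api.py): an installable jar built
## from Java sources, with javadoc; the classpath entry is hypothetical.
#
#   jar(name='my-app', srcdir='src/java',
#       classpath=['lib/java/some-dependency.jar'],
#       javadoc={'title': 'My App API'})
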
1082 
1102 def python_library(package_name, srcdir=None, srcgen=[], python_files=None):
1103  pydir = package_name if srcdir is None else os.path.join(srcdir, package_name)
1104  prefix = '' if srcdir is None else srcdir
1105 
1106  collect_python_files = False
1107  if python_files is None:
1108  python_files = []
1109  collect_python_files = True
1110 
1111  ## make sure we copy __init__.py files even if not specified in python_files
1112  for dirpath, dirnames, filenames in os.walk(pydir):
1113  for filename in filenames:
1114  f = os.path.join(dirpath, filename)
1115  if collect_python_files:
1116  if f.endswith('.py'):
1117  python_files.append(f)
1118  elif filename == '__init__.py' and f not in python_files:
1119  python_files.append(f)
1120 
1121  outdir = "$(build)"
1122  for f in python_files:
1123  outfile = os.path.join(outdir, f)
1124  rule(
1125  output=outfile,
1126  input=f,
1127  commands=[
1128  'mkdir -p %s' % outdir,
1129  'mkdir -p %s/`dirname %s`'%(outdir,f),
1130  'cp -f %s %s/`dirname %s`'%(f,outdir,f)],
1131  description='Copying python library %s to the build dir' % f)
1132  rule(
1133  output='python-libraries',
1134  input=outfile)
1135  # only install python files that are not tests
1136  if '/test/' not in f:
1137  install_file(outfile, 'lib/python/' + os.path.dirname(f[len(prefix):]))
1138 
1139 
1140  pygendir = os.path.join('$(build)', 'python', package_name)
1141  for f in srcgen:
1142  prefixdir = os.path.join('lib', 'python', package_name, os.path.dirname(f))
1143  install_file(os.path.join(pygendir, f), prefixdir)
1144 
1145  dist_files(python_files)
1146  core.emit_clean_dir('$(build)/' + pydir)
1147 
1148 
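## Usage sketch (illustrative; not part of api.py): shipping the python
## package at python/mypkg (hypothetical layout).
#
#   python_library('mypkg', srcdir='python')
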
1149 
1161 def lbconfig_plugin(package_name, srcdir, plugin_module, plugin_deps=[]):
1162  python_library(package_name, srcdir)
1163  if plugin_deps:
1164  core.add_plugin(
1165  plugin_module,
1166  build_dir=os.path.join('$(build)', 'python'),
1167  install_dir='lib/python/',
1168  deps=plugin_deps)
1169 
1170 
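## Usage sketch (illustrative; not part of api.py): registering an lbconfig
## extension module shipped by this package (hypothetical names).
#
#   lbconfig_plugin('mypkg', srcdir='python',
#                   plugin_module='mypkg.lbconfig_ext',
#                   plugin_deps=['logicblox'])
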
1171 
1177 def bin_program(name):
1178  install_file(name, 'bin')
1179  dist_files([name])
1180 
1181 
1182 
1183 
1204 def check_lbunit_suite(name, suite, workspaces=[], libraries=[], env=None, container_target='check'):
1205  command = "$(lb) unit --suite-dir %s" % suite
1206  check_command("check-lbunit-%s" % name, command, name, workspaces, libraries, [], env, container_target=container_target)
1207 
1208 
1229 def check_lbunit_test(name, test, workspaces=[], libraries=[], env=None, container_target='check'):
1230  command = "$(lb) unit --test %s" % test
1231  check_command("check-lbunit-%s" % name, command, name, workspaces, libraries, [], env, container_target=container_target)
1232 
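## Usage sketch (illustrative; not part of api.py): running lb-unit tests as
## part of 'make check'; the suite directory and test name are hypothetical.
#
#   check_lbunit_suite('unit', suite='test/suites', workspaces=['test_ws'])
#   check_lbunit_test('smoke', test='test/smoke', libraries=['my_lib'])
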
1233 
1263 def check_program(filepath, workspaces=[], libraries=[], name=None, services=[], env=None, container_target='check', input=None, params=''):
1264  if name is None:
1265  paths = core.splitall(filepath)
1266  assert len(paths) >= 2, "The test file for check_program must be in a folder(s). Paths: %s File: %s" % (paths, filepath)
1267  suite_name = paths[-2]
1268  else:
1269  suite_name = name
1271  check_target = 'check-%s' % suite_name
1272 
1273  all_deps = [filepath]
1274  if input is not None:
1275  for d in input:
1276  all_deps.append(d)
1277 
1278  ## add makefile variable that can be used to
1279  ## select the testcase we want to run
1280  command = '%s %s $(testcase)' % (filepath, params)
1281 
1282  result_file = check_command(check_target, command, suite_name, workspaces, libraries, services, env, extra_inputs=all_deps, container_target= container_target)
1283  dist_files([filepath])
1284  return {'check_target': check_target, 'result_file': result_file}
1285 
1286 
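## Usage sketch (illustrative; not part of api.py): wiring an executable test
## into 'make check'. With no explicit name, the suite name comes from the
## file's parent folder ('smoke' here); paths are hypothetical.
#
#   check_program('tests/smoke/run.sh', workspaces=['test_ws'],
#                 params='--verbose')
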
1287 
1288 
1316 def check_command(check_target,command,suite_name, workspaces=[], libraries=[], services=[], env=None, extra_inputs=[], container_target='check'):
1317 
1318 
1319  # test setup
1320  setup_file = os.path.join("$(build)", "%s.setup" % check_target)
1321  setup_commands = [
1322  'mkdir -p $(build)'
1323  ]
1324  setup_inputs = []
1325  for service_name in services:
1326  setup_inputs.append(core.service_started_file(service_name))
1327  for ws in workspaces:
1328  setup_inputs.append(core.check_lb_workspace_target(ws))
1329  for lib in libraries:
1330  rel_summary_file = core.lb_library_summary_file(lib)
1331  summary_file = '$(build)/sepcomp/' + rel_summary_file
1332  setup_inputs.append(summary_file)
1333 
1334  rule(
1335  output=setup_file,
1336  input=setup_inputs,
1337  commands=setup_commands,
1338  description='Setting up for test suite ' + suite_name
1339  )
1340 
1341  # compute the string to export environment variables
1342 
1343  env_string = ''
1344  if env is not None:
1345  env_string = " ".join("%s=%s" % (key, value) for key, value in env.items())
1346 
1347  # test execution
1348  # writes status code to result file which will then be checked in the
1349  # main 'check' command. This allows all tests to run and therefore capture
1350  # all errors before failing.
1351  result_file = os.path.join("$(build)", "%s.result" % check_target)
1352  result_commands = [
1353  '%s %s ; echo $$? > %s' % (env_string, command, result_file)
1354  ]
1355  result_inputs = [ setup_file ]
1356  for i in extra_inputs:
1357  result_inputs.append(i)
1358  rule(
1359  output=result_file,
1360  input=result_inputs,
1361  commands=result_commands,
1362  description='Running test command ' + command
1363  )
1364  emit_clean_file(setup_file)
1365  emit_clean_file(result_file)
1366 
1367 
1368  # test teardown
1369  teardown_target = "teardown-%s" % suite_name
1370  teardown_inputs = [result_file]
1371  for service_name in services:
1372  teardown_inputs.append(core.service_started_file(service_name))
1373  teardown_inputs.append(core.service_stop_target(service_name))
1374  rule(
1375  output=core.service_stop_target(service_name),
1376  input=result_file
1377  )
1378 
1379  teardown_commands = ['rm -f %s' % setup_file]
1380  rule(
1381  output=teardown_target,
1382  input=teardown_inputs,
1383  commands=teardown_commands,
1384  phony=True,
1385  description='Tearing down test suite ' + suite_name
1386  )
1387 
1388  # check target
1389  # fails if the result file has a bad error code
1390  rule(
1391  output=check_target,
1392  input=[result_file, teardown_target],
1393  commands=['$(Q)test $$(cat %s) = "0"' % result_file],
1394  phony=True
1395  )
1396 
1397  rule(
1398  output=container_target,
1399  input=check_target
1400  )
1401  return result_file
1402 
1403 
1404 
1405 
1407 def config_file(name):
1408  install_file(name, 'config')
1409  dist_files([name])
1410 
1411 
1412 
1420 def install_files(filenames, destdir):
1421  for f in filenames:
1422  install_file(f, destdir)
1423 
1424 
1425 
1433 def install_file(filename, destdir):
1434  destdir = core.fix_install_destdir(destdir)
1435  rule(
1436  output='install',
1437  input=filename,
1438  commands='mkdir -p ' + destdir + ' && cp -pf ' + filename + ' ' + destdir,
1439  description='Installing files into $(prefix)')
1441 
1442 
1455 def install_dir(dirname, destdir, allow_empty=False):
1456  destdir = core.fix_install_destdir(destdir)
1457  copy_command = 'cp -fR %s/* %s || echo "Skipping installation of empty directory \'%s\'" ' % (dirname, destdir, dirname)
1458  if allow_empty:
1459  copy_command = 'ls %s/* &>/dev/null && %s' % (dirname, copy_command)
1460  rule(
1461  output='install',
1462  input=dirname,
1463  commands='mkdir -p %s && %s ' % (destdir, copy_command),
1464  description='Installing files into $(prefix)')
1465 
1466 
1467 
1469 def dist_files(files):
1470  copy_commands = []
1471  for f in files:
1472  copy_commands.append('mkdir -p $(package_name)/`dirname %s`'%f)
1473  copy_commands.append('cp -f %s $(package_name)/`dirname %s`'%(f, f))
1474 
1475  rule(
1476  output='dist_files',
1477  input='dist_dir',
1478  commands=copy_commands,
1479  description='Copying files to dist dir')
1480 
1481  rule(
1482  output='dist_dir',
1483  input=files)
1484 
1485 
1486 
1488 def dist_dir(directory):
1489  files = []
1490  for dirpath, dirnames, filenames in os.walk(directory):
1491  for filename in filenames:
1492  f = os.path.join(dirpath, filename)
1493  files.append(f)
1494  dist_files(files)
1496 
1497 
1504 def copy_file(src, target):
1505  rule(
1506  output=target,
1507  input=src,
1508  commands='cp -f $< $@',
1509  description='Copying %s to %s' % (src, target))
1510 
1512 
1521 def emit_clean_workspace(workspace, success_file="."):
1522  rule(
1523  output='clean-workspaces',
1524  input=[],
1525  # the minus makes it not stop if it fails
1526  # use if condition to only delete existing workspaces
1527  # this speeds up clean operation significantly
1528  commands="-if [ -e %s ]; then lb delete --force %s;fi" % (success_file, workspace),
1529  description='Cleaning up')
1530  emit_clean_file(success_file, 'clean-workspaces')
1531  rule('clean', input=['clean-workspaces'])
1532 
1533 
1534 
1536 def emit_clean_file(filename, output='clean'):
1537  rule(
1538  output=output,
1539  input=[],
1540  commands='rm -f ' + filename,
1541  description='Cleaning up')
1542 
1544 
1550 def link_libs(libraries):
1551  link_commands = ['mkdir -p lib']
1552  for dep in libraries:
1553  link_commands.append('ln -f -s ' + dep + ' lib/')
1554 
1555  rule(
1556  output='link_libs',
1557  input=[],
1558  phony=True,
1559  commands=link_commands,
1560  description='Setting up links for library dependencies.'
1561  )
1562 
1563  rule(
1564  output='clean',
1565  input=[],
1566  commands='rm -f lib/*',
1567  description='Cleaning up library links')
1568 
1569 
1570 ### THESE ARE PRIVATE FUNCTIONS THAT END USERS SHOULD NOT USE
1571 ### They are here because they depend on the public api.
1572 ### As we improve lbconfig, we should move these private methods to core.py
1573 
1574 ## private
1575 def write_lbconfig_deps(build_dir, module_name, deps, install_dir):
1576  def make_dict(dep):
1577  return {'default_path': '$(%s)' % dep.name, 'help': dep.help}
1578  dependencies = dict((k,make_dict(dep)) for k,dep in core.get_dependencies().iteritems())
1579  dep_file = '%s/.%s_deps' % (build_dir, module_name)
1580  deps = dict((dep, dependencies[dep]) for dep in deps)
1581  rule(dep_file, [],
1582  ['$(Q)mkdir -p %s' % os.path.split(dep_file)[0],
1583  '$(Q)echo %s > %s' % (core.escape_for_bash(json.dumps(deps)), dep_file)],
1584  phony=True)
1585  emit_clean_file(dep_file)
1586  install_file(dep_file, install_dir)
1587 
1588 
1589 ## private
1590 
1592 def write_projects():
1593  install_libraries = []
1594  check_libraries = []
1595  all_dist_files = []
1596  warnings = []
1597  errors = []
1598  for p in core.g_projects.values():
1599  # first do all the I/O & dependency analysis
1600  p.populate()
1601  errors += p.errors
1602  warnings += p.warnings
1603 
1604  for p in core.g_projects.values():
1605  p.write()
1606 
1607  all_dist_files.extend(p.real_src_dependencies())
1608 
1609  if p.install:
1610  install_libraries.append(p.summary_file)
1611  else:
1612  check_libraries.append(p.summary_file)
1613 
1614  rule(
1615  output='lb-libraries',
1616  input=install_libraries)
1617 
1618  rule(
1619  output='check-lb-libraries',
1620  input=check_libraries)
1621 
1622  dist_files(all_dist_files)
1623  return (warnings, errors)
1624 
1625 ## private
1626 
1628 def write_makefile(extension_files=[], makefile_name='Makefile'):
1629  core.g_makefile = StringIO.StringIO()
1630  warnings = []
1631  errors = []
1632  core.emit_dependencies()
1633 
1634  # get projects' errors and warnings
1635  (proj_warnings, proj_errors) = write_projects()
1636  warnings += proj_warnings
1637  errors += proj_errors
1638 
1639  # get workspaces' errors and warnings
1640  (ws_warnings, ws_errors) = core.write_workspaces()
1641  warnings += ws_warnings
1642  errors += ws_errors
1643 
1644  # get tasks' errors and warnings
1645  (task_warnings, task_errors) = core.run_tasks()
1646  warnings += task_warnings
1647  errors += task_errors
1648 
1649  # OPTIMIZE_ME!
1650  # The following code checks whether the dependency directories/files actually
1651  # exist. It does so by generating a temporary makefile and calling it repeatedly.
1652  # This is very inefficient, so it takes a long time. We could check it directly
1653  # in python.
1654  deps = core.get_dependencies()
1655  for k, v in deps.iteritems():
1656  dep = k
1657  if v.path.startswith('$') and not v.path.startswith('$('):
1658  # is an environment variable, must test differently
1659  command = '$(Q)test %s && test -e %s' % (v.path, v.path)
1660  else:
1661  command = '$(Q)test -e $(%s)' % dep
1662  rule('check-argument-' + dep, [], command)
1663 
1664  for plugin in core.g_plugins:
1665  write_lbconfig_deps(
1666  plugin['build_dir'], plugin['module_name'], plugin['deps'], plugin['install_dir'])
1667 
1668  if extension_files:
1669  dist_files(extension_files)
1670 
1671  core.write_variables()
1672  core.write_rules()
1673  # writing to a tmp file to check variable using make check-argument-
1674  tmp_file = "%s_tmp"%makefile_name
1675  core.to_file(tmp_file)
1676  for k, v in deps.iteritems():
1677  dep = k
1678 
1679  check = subprocess.call(['make', '-f', tmp_file, 'check-argument-' + dep],
1680  stdout=open(os.devnull, "w"),
1681  stderr=subprocess.STDOUT)
1682  if check != 0:
1683  if deps[dep].is_optional():
1684  print 'warning: directory \'{0}\' for \'{1}\' does not exist'.format(v.path, dep)
1685  else:
1686  print 'error: directory \'{0}\' for \'{1}\' does not exist'.format(v.path, dep)
1687  sys.exit(1)
1688  # report errors and exit
1689  if len(errors) > 0:
1690  for er in errors:
1691  print "ERROR:%s\n"%er
1692  sys.exit(1)
1693  # report warnings
1694  if len(warnings) > 0:
1695  for w in warnings:
1696  print "WARN:%s\n"%w
1697  os.remove(tmp_file)
1698  core.to_file(makefile_name)
1699 
1700 
1701 ## private
1702 
1705 class Project(object):
1706 
1707  def __init__(self, name, srcdir, srcgen=[], deps=None, install=True, generated=False, install_subdir='share/$(package_basename)',
1708  post_cmds=[]):
1709  # the library name
1710  self.name = name
1711  # the root directory for the source files
1712  self.srcdir = srcdir
1713  # any src files that are generated and therefore not available at
1714  # configure time
1715  self.srcgen = srcgen
1716  # any libraries this project depends on that the user explicitly listed
1717  if not deps:
1718  self.deps = {}
1719  elif type(deps) is list:
1720  self.deps = dict((dep, '$(%s)' % dep) for dep in deps)
1721  else:
1722  self.deps = deps
1723  # whether or not this project should be installed in the 'make install'
1724  # target
1725  self.install = install
1726  self.install_subdir = install_subdir
1727  # whether or not this project is generated
1728  self.generated = generated
1729  # optional commands to execute after the project is built
1730  self.post_cmds = post_cmds
1731 
1732  sepcomp = '$(build)/sepcomp'
1733  slashy_name = self.name.replace(':', '_')
1735  # the project file path for this project
1736  self.project_file = "%s/%s.project" % (self.srcdir, slashy_name)
1737  if not generated and not os.path.exists(self.project_file):
1738  raise core.ConfigureError("Could not find %s/%s.project for library %s." % (self.srcdir, slashy_name, name))
1739 
1740  # the resulting summary file path for this project
1741  self.summary_file = "%s/%s/LB_SUMMARY.lbp" % (sepcomp, slashy_name)
1742  # the directory where the seperately compiled project will be placed
1743  self.outdir = os.path.dirname(self.summary_file)
1745  # The following are populated in the populate() method which should
1746  # be called after object construction
1747 
1748  # any libraries this project depends on that are not compiled by
1749  # buildlib. These are typically system libraries like bloxweb.
1750  self.compiled_libraries = {}
1751  # the names of all dependent libraries, source & compiled
1752  self.dependent_libraries = []
1753  # a list of all source files for this project
1754  self.source_dependencies = []
1755  # a list of all file dependencies from other libraries, typically they
1756  # are lb summary files
1757  self.library_dependencies = []
1759  self.related_projects = {}
1760  self.warnings= []
1761  self.errors=[]
1763 
1764 
1769  def populate(self):
1770 
1771  if not self.generated:
1772  # lb_library_deps will want to parse the associated .project file,
1773  # only safe for projects that are not being generated at configure/build time
1774  self.dependent_libraries = core.lb_library_deps(self.project_file)
1775 
1776  self.dependent_libraries.extend(self.deps.keys())
1778  for dep in self.dependent_libraries:
1779  found_dep = False
1780  if dep in self.deps:
1781  found_dep = True
1782  self.compiled_libraries[dep] = self.deps[dep]
1783  # if a dep is explicitly specified in the deps, then
1784  # don't continue to consider other ways of finding it.
1785  continue
1786  if dep in core.g_compiledlibs:
1787  found_dep = True
1788  self.compiled_libraries[dep] = core.g_compiledlibs[dep]
1789  if dep in core.g_projects:
1790  found_dep = True
1791  self.related_projects[dep] = core.g_projects[dep]
1792  if not found_dep:
1793  self.errors.append("Library %s, needed by %s, not found."%(dep, self.name))
1796 
1797  @property
1798 
1801  def libpath(self):
1802 
1803  libpath = ['$(build)/sepcomp']
1804  for dep in self.compiled_libraries.values():
1805  libpath.append(dep)
1806  for p in self.related_projects.values():
1807  libpath.extend(p.libpath.split(":"))
1808 
1809  return ":".join(frozenset(libpath))
1810 
1811  def _get_library_dependencies(self, dependent_libraries):
1812  dependencies = []
1813  for dep in dependent_libraries:
1814  if dep in core.g_projects:
1815  dependencies.append(core.g_projects.get(dep).summary_file)
1816 
1817  return dependencies
1818 
1819 
1822  def _get_source_dependencies(self):
1823 
1824  project_file_dir = os.path.dirname(self.project_file)
1825  dependencies = []
1826  if not self.generated:
1827  files = core.files_used(self.project_file)
1828  dependencies = [self.project_file]
1829  for dep in files:
1830  dependencies.append(os.path.join(project_file_dir, dep))
1831 
1832  # generated source files in modules might not have been found yet
1833  # (they don't exist at configure time), so we extend the
1834  # dependencies.
1835  dependencies.extend([os.path.join(project_file_dir, src) for src in self.srcgen])
1836 
1837  return dependencies
1838 
1839 
1841  def real_src_dependencies(self):
1842 
1843  real_src_files = []
1844  if not self.generated:
1845  for dep in self.source_dependencies:
1846  if not dep in self.srcgen:
1847  real_src_files.append(dep)
1848 
1849  return real_src_files
1850 
1851 
1853  def write(self):
1854 
1855  # executable make target for building this library
1856  rule(
1857  output=self.name.replace(':', '_'),
1858  input=self.summary_file,
1859  phony=True)
1860 
1861  # Remove the file on compilation failure. Otherwise, running
1862  # make twice in a row will cause the second run to skip building
1863  # the broken library.
1864  trailing_opt = ' ' + self.project_file
1865 
1866  env_vars = 'LOGICBLOX_HOME=$(logicblox) '
1867 
1868  rule(
1869  output=[self.summary_file, self.outdir],
1870  input=self.source_dependencies + self.library_dependencies,
1871  commands=[
1872  'mkdir -p %s' % self.outdir,
1873  "%s sh -c '$(logicblox)/bin/lb compile project --libpath %s --out-dir %s' || ( rm -f %s; exit 1)"
1874  % (env_vars, self.libpath, self.outdir + trailing_opt, self.summary_file),
1875  'touch %s' % self.summary_file
1876  ] + self.post_cmds,
1877  description='Compiling logicblox project ' + self.name)
1878 
1879  if self.install:
1880  install_dir(self.outdir, self.install_subdir + '/' + self.name.replace(':', '_'))
1881 
1882  core.emit_clean_dir(os.path.dirname(self.summary_file))
1883 
1884 
1885 def check_ws_success_file(ws_name):
1886  target = core.lb_workspace_target(ws_name)
1887  return '$(build)/check_workspace_%s.success' % target
1888 
1889 
1890 def archive_export_target(ws_name):
1891  return core.get_ws_archive_names(ws_name)[1]
1892 
1893 ## private
1894 class Branch(object):
1895  def __init__(self, name, parent=None, libraries=[]):
1896  self.name = name
1897  self.libraries = libraries
1898  self.parent = parent
1899  self.projects = []
      self.warnings = []
      self.errors = []
1900 
1901 
1904  def populate(self):
1906  for lib in self.libraries:
1907  if lib in core.g_projects:
1908  self.projects.append(core.g_projects[lib])
1909  else:
1910  self.errors.append("Library %s, needed by branch %s not found."%(lib,self.name))
1911  if len(self.projects) > 1:
1912  self.warnings.append("Please specify at most 1 project to add to branch \"%s\". Adding more than 1 project to the workspace can cause bad online performance and impact the size of the built workspace."%(self.name))
1913 
1914 ## private
1915 
1919 class Workspace(object):
1920 
1921  def __init__(self, name, libraries=[], native_libs=[], archived=False, init_cmds=[], input=[], compress=True, create_cmd=None, keep=False, branches=[]):
1922  self.name = name
1923  self.libraries = libraries
1924  self.native_libs = native_libs
1925  # a list of all Projects to install. Used for getting proper libpaths.
1926  self.projects = []
1927  self.warnings=[]
1928  self.errors=[]
1929  # if the workspace should be exported and archived
1930  self.archived=archived
1931  # if the original workspace is to be kept (only used if archived is true)
1932  self.keep=keep
1933  self.init_cmds=init_cmds
1934  self.input=input
1935  # if the archived workspace should be compressed as tgz
1936  self.compress=compress
1937  self.create_cmd=create_cmd
1938  self.branches = branches
1939 
1943  def populate(self):
1945  for lib in self.libraries:
1946  if lib in core.g_projects:
1947  self.projects.append(core.g_projects[lib])
1948  else:
1949  self.errors.append("Library %s, needed by workspace %s not found."%(lib,self.name))
1950  if len(self.projects) > 1:
1951  self.warnings.append("Please specify at most 1 project to add to workspace \"%s\". Adding more than 1 project to the workspace can cause bad online performance and impact the size of the built workspace."%(self.name))
1952 
1953  for branch in self.branches:
1954  branch.populate()
1955 
1956 
1958  def write(self):
1959 
1960  success_file = check_ws_success_file(self.name)
1961 
1962  inputs = []
1963  if self.create_cmd is not None:
1964  create_cmd = self.create_cmd
1965  else:
1966  create_cmd = '$(lb) create %s --overwrite' % self.name
1967  if len(self.native_libs) > 0:
1968  libs_arg = '--libs ' + ','.join(self.native_libs)
1969  create_cmd += ' ' + libs_arg
1970 
1971  # put the removal of the success file first so make will fail if one
1972  # of the commands fails
1973  rm_cmd = 'rm -f %s' % success_file
1974  commands = [rm_cmd, create_cmd]
1975 
1976  for branch in self.branches:
1977  branch_cmd = '$(lb) branch %s %s' % (self.name, branch.name)
1978  if branch.parent is not None:
1979  parent_cmd = '--parent %s' % branch.parent
1980  branch_cmd += ' ' + parent_cmd
1981 
1982  commands.append(branch_cmd)
1983  for project in branch.projects:
1984  inputs.append(project.summary_file)
1985  commands.append('$(lb) addproject %s %s --branch %s --libpath %s --commit-mode diskcommit' % (self.name, project.outdir, branch.name, project.libpath))
1986 
1987  for project in self.projects:
1988  inputs.append(project.summary_file)
1989  commands.append('$(lb) addproject %s %s --libpath %s --commit-mode diskcommit' % (self.name, project.outdir, project.libpath))
1990 
1991  if self.init_cmds is not None:
1992  for c in self.init_cmds:
1993  commands.append(c)
1994 
1995  commands.append(core.touch(success_file))
1996 
1997  if self.input is not None:
1998  for d in self.input:
1999  inputs.append(d)
2000 
2001  rule(
2002  output=success_file,
2003  input=inputs,
2004  commands='&&'.join(commands),
2005  description='Building workspace ' + self.name
2006  )
2007 
2008  emit_clean_workspace(self.name, success_file)
2009 
2010  check_target = core.check_lb_workspace_target(self.name)
2011  rule(
2012  output=check_target,
2013  input=success_file,
2014  phony=True
2015  )
2016 
2017  # if archived flag is true, generate a tgz file from the workspace and
2018  # delete the deployed workspace
2019  if self.archived:
2020  archive_names = core.get_ws_archive_names(self.name)
2021  ws_dir = archive_names[0]
2022  ws_export = archive_names[1]
2023  ws_archive = archive_names[2]
2024 
2025  archive_cmds = []
2026  archive_cmds.append('mkdir -p ' + ws_dir)
2027  archive_cmds.append('$(lb) export-workspace --overwrite ' + self.name + ' ' + ws_export)
2028  if not self.keep:
2029  archive_cmds.append('$(lb) delete ' + self.name)
2030  if self.compress:
2031  archive_cmds.append('(cd ' + ws_export + '; tar czf ../' + self.name + '.tgz *)')
2032  archive_cmds.append('rm -rf ' + ws_export)
2033  rule(
2034  output=ws_export + '.tgz',
2035  input=success_file,
2036  commands='&&'.join(archive_cmds),
2037  description='Archiving workspace ' + self.name
2038  )
2039  rule(
2040  output='archive-ws-' + self.name,
2041  input=ws_export + '.tgz',
2042  phony=True
2043  )
2044  emit_clean_file(ws_archive)
2045  else:
2046  rule(
2047  output=ws_export,
2048  input=success_file,
2049  commands='&&'.join(archive_cmds),
2050  description='Archiving workspace ' + self.name
2051  )
2052  rule(
2053  output='archive-ws-' + self.name,
2054  input=ws_export,
2055  phony=True
2056  )
2057  core.emit_clean_dir(ws_export)
2058 
2059 
2060 # import workflow module for backwards compatibility
2061 from workflow import *
2062 