# Task BitBake runs for a recipe when no task is named on the command line.
1 BB_DEFAULT_TASK ?= "build"
# NOTE(review): body fragment of the oe_runmake shell helper — its function
# header and closing brace are elided from this excerpt.  It echoes the make
# command line via oenote, then runs make with EXTRA_OEMAKE, aborting the
# task through die on a non-zero exit.
29 oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
30 ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
# Build the bootstrap dependency string that is prepended to DEPENDS for
# every recipe (see the DEPENDS_prepend assignments below).
33 def base_dep_prepend(d):
35 # Ideally this will check a flag so we will operate properly in
36 # the case where host == build == target, for now we don't work in
# Default bootstrap dependencies needed before anything can be fetched/staged.
39 deps = "shasum-native coreutils-native"
# The bootstrap recipes themselves must not depend on themselves (circular).
# NOTE(review): the assignment made by this branch is elided from this excerpt.
40 if bb.data.getVar('PN', d, True) == "shasum-native" or bb.data.getVar('PN', d, True) == "stagemanager-native":
42 if bb.data.getVar('PN', d, True) == "coreutils-native":
43 deps = "shasum-native"
45 # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
46 # we need that built is the responsibility of the patch function / class, not
# When cross-compiling (HOST_SYS != BUILD_SYS), also pull in the cross
# toolchain and target libc providers.
48 if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
49 if (bb.data.getVar('HOST_SYS', d, 1) !=
50 bb.data.getVar('BUILD_SYS', d, 1)):
51 deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
# NOTE(review): the function's return statement (presumably 'return deps')
# is elided from this excerpt — confirm against the full file.
# Prepend the bootstrap dependencies computed above to DEPENDS for normal,
# native, and nativesdk variants of every recipe.
54 DEPENDS_prepend="${@base_dep_prepend(d)} "
55 DEPENDS_virtclass-native_prepend="${@base_dep_prepend(d)} "
56 DEPENDS_virtclass-nativesdk_prepend="${@base_dep_prepend(d)} "
# Functions run by do_setscene (see base_do_setscene below).
59 SCENEFUNCS += "base_scenefunction"
# Setscene helper: if a "<STAMP>.needclean" marker exists (written by the
# do_rebuild stamp handling in base_eventhandler), run do_clean first.
# NOTE(review): the function's closing brace is elided from this excerpt.
61 python base_scenefunction () {
62 stamp = bb.data.getVar('STAMP', d, 1) + ".needclean"
63 if os.path.exists(stamp):
64 bb.build.exec_func("do_clean", d)
# Run every function listed in SCENEFUNCS, then stamp do_setscene itself
# if no stamp exists yet.  NOTE(review): closing brace elided from excerpt.
67 python base_do_setscene () {
68 for f in (bb.data.getVar('SCENEFUNCS', d, 1) or '').split():
69 bb.build.exec_func(f, d)
70 if not os.path.exists(bb.data.getVar('STAMP', d, 1) + ".do_setscene"):
71 bb.build.make_stamp("do_setscene", d)
# selfstamp: the function manages its own stamp file (see above) rather
# than having BitBake write one automatically.
73 do_setscene[selfstamp] = "1"
74 addtask setscene before do_fetch
# Fetch runs inside DL_DIR and needs shasum-native staged for checksumming.
77 do_fetch[dirs] = "${DL_DIR}"
78 do_fetch[depends] = "shasum-native:do_populate_staging"
# Fetch all SRC_URI entries, translating fetcher exceptions into task
# failures, then verify archive checksums against conf/checksums.ini.
# NOTE(review): several lines are elided from this excerpt — in particular
# the 'try:' statements that pair with the except clauses below, the
# first_uri bookkeeping, and the function's closing brace.
79 python base_do_fetch() {
# Work on a copy so expansion does not mutate the recipe datastore.
82 localdata = bb.data.createCopy(d)
83 bb.data.update_data(localdata)
85 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
# Initialise the fetchers; map fetcher errors to FuncFailed.
90 bb.fetch.init(src_uri.split(),d)
91 except bb.fetch.NoMethodError:
92 (type, value, traceback) = sys.exc_info()
93 raise bb.build.FuncFailed("No method: %s" % value)
94 except bb.MalformedUrl:
95 (type, value, traceback) = sys.exc_info()
96 raise bb.build.FuncFailed("Malformed URL: %s" % value)
# Perform the actual download.
99 bb.fetch.go(localdata)
100 except bb.fetch.MissingParameterError:
101 (type, value, traceback) = sys.exc_info()
102 raise bb.build.FuncFailed("Missing parameters: %s" % value)
103 except bb.fetch.FetchError:
104 (type, value, traceback) = sys.exc_info()
105 raise bb.build.FuncFailed("Fetch failed: %s" % value)
106 except bb.fetch.MD5SumError:
107 (type, value, traceback) = sys.exc_info()
108 raise bb.build.FuncFailed("MD5 failed: %s" % value)
# Catch-all for any other fetch failure (its 'except' line is elided here).
110 (type, value, traceback) = sys.exc_info()
111 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
114 # Verify the SHA and MD5 sums we have in OE and check what do
# Collect conf/checksums.ini candidates from every BBPATH directory.
116 checksum_paths = bb.data.getVar('BBPATH', d, True).split(":")
118 # reverse the list to give precedence to directories that
119 # appear first in BBPATH
120 checksum_paths.reverse()
122 checksum_files = ["%s/conf/checksums.ini" % path for path in checksum_paths]
124 parser = base_chk_load_parser(checksum_files)
126 bb.note("No conf/checksums.ini found, not checking checksums")
129 bb.note("Creating the CheckSum parser failed: %s:%s" % (sys.exc_info()[0], sys.exc_info()[1]))
132 pv = bb.data.getVar('PV', d, True)
133 pn = bb.data.getVar('PN', d, True)
# Check each remote (http/ftp family) URI against the checksum database.
137 for url in src_uri.split():
138 localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
139 (type,host,path,_,_,params) = bb.decodeurl(url)
140 uri = "%s://%s%s" % (type,host,path)
142 if type in [ "http", "https", "ftp", "ftps" ]:
143 # We provide a default shortcut of plain [] for the first fetch uri
144 # Explicit names in any uri overrides this default.
145 if not "name" in params and first_uri:
# A missing checksum entry is fatal unless OE_ALLOW_INSECURE_DOWNLOADS is set.
148 if not (base_chk_file_vars(parser, localpath, params, d) or base_chk_file(parser, pn, pv,uri, localpath, d)):
149 if not bb.data.getVar("OE_ALLOW_INSECURE_DOWNLOADS", d, True):
150 bb.fatal("%s-%s: %s has no checksum defined, cannot check archive integrity" % (pn,pv,uri))
152 bb.note("%s-%s: %s has no checksum defined, archive integrity not checked" % (pn,pv,uri))
# A present-but-mismatching checksum fails the task.
154 raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
# Unpack a single fetched file into the current working directory, choosing
# a shell command based on the file's suffix (or copying for directories
# and plain files).  NOTE(review): many interior lines are elided from this
# excerpt — among them the default-argument guard before line 160, several
# 'else:' branches, the zip/jar unzip command construction, the chdir into
# parm['subdir'], and the function's return — confirm against the full file.
157 def oe_unpack_file(file, data, url = None):
# Synthesise a file:// url when the caller did not pass one.
160 url = "file://%s" % file
# efile: the decompressed output name inside WORKDIR for bare gz/bz2/Z files.
161 dots = file.split(".")
162 if dots[-1] in ['gz', 'bz2', 'Z']:
163 efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
# Suffix dispatch: archives are extracted, bare compressed files are
# decompressed to efile.  --no-same-owner avoids chown attempts as non-root.
167 if file.endswith('.tar'):
168 cmd = 'tar x --no-same-owner -f %s' % file
169 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
170 cmd = 'tar xz --no-same-owner -f %s' % file
171 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
172 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
173 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
174 cmd = 'gzip -dc %s > %s' % (file, efile)
175 elif file.endswith('.bz2'):
176 cmd = 'bzip2 -dc %s > %s' % (file, efile)
177 elif file.endswith('.tar.xz'):
178 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
179 elif file.endswith('.xz'):
180 cmd = 'xz -dc %s > %s' % (file, efile)
181 elif file.endswith('.zip') or file.endswith('.jar'):
# NOTE(review): the zip/jar command setup between lines 181 and 186 is elided.
183 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
186 cmd = "%s '%s'" % (cmd, file)
# Directories: recreate the FILESPATH-relative subdirectory and copy in.
187 elif os.path.isdir(file):
189 filespath = bb.data.getVar("FILESPATH", data, 1).split(":")
191 if file[0:len(fp)] == fp:
192 destdir = file[len(fp):file.rfind('/')]
193 destdir = destdir.strip('/')
196 elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
197 os.makedirs("%s/%s" % (os.getcwd(), destdir))
200 cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
# Plain files: only file:// urls without a 'patch' parameter get the
# destdir treatment; others are copied to the current directory.
202 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
203 if not 'patch' in parm:
204 # The "destdir" handling was specifically done for FILESPATH
205 # items. So, only do so for file:// entries.
207 destdir = bb.decodeurl(url)[1] or "."
210 bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
211 cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
# Skip the copy when source and destination are already the same file.
216 dest = os.path.join(os.getcwd(), os.path.basename(file))
217 if os.path.exists(dest):
218 if os.path.samefile(file, dest):
221 # Change to subdir before executing command
222 save_cwd = os.getcwd();
223 parm = bb.decodeurl(url)[5]
225 newdir = ("%s/%s" % (os.getcwd(), parm['subdir']))
# Run the constructed command with the datastore's PATH in a subshell.
229 cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
230 bb.note("Unpacking %s to %s/" % (base_path_out(file, data), base_path_out(os.getcwd(), data)))
231 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
# Unpack everything fetched into WORKDIR.
237 addtask unpack after do_fetch
238 do_unpack[dirs] = "${WORKDIR}"
# Resolve each SRC_URI entry to its local file and unpack it via
# oe_unpack_file.  NOTE(review): elided from this excerpt are the 'try:'
# pairing with the except on line 252, the guard that raises when no local
# file exists (line 255's condition), the check of oe_unpack_file's return
# before line 259, and the closing brace.
239 python base_do_unpack() {
242 localdata = bb.data.createCopy(d)
243 bb.data.update_data(localdata)
245 src_uri = bb.data.getVar('SRC_URI', localdata)
248 src_uri = bb.data.expand(src_uri, localdata)
249 for url in src_uri.split():
251 local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
252 except bb.MalformedUrl, e:
253 raise bb.build.FuncFailed('Unable to generate local path for malformed uri: %s' % e)
255 raise bb.build.FuncFailed('Unable to locate local file for %s' % url)
# realpath so symlinked download dirs unpack from the true location.
256 local = os.path.realpath(local)
257 ret = oe_unpack_file(local, localdata, url)
259 raise bb.build.FuncFailed()
# Global event handler: logs task/build progress messages, prints the build
# configuration banner, validates required variables at BuildStarted, and
# clears stamps for do_rebuild.  NOTE(review): elided from this excerpt are
# the 'name = getName(e)' / 'msg = ""' setup, the messages[] lookup,
# several branch bodies, the return statements, and the closing brace.
262 addhandler base_eventhandler
263 python base_eventhandler() {
264 from bb import note, error, data
265 from bb.event import Handled, NotHandled, getName
269 if name == "TaskCompleted":
270 msg = "package %s: task %s is complete." % (data.getVar("PF", e.data, 1), e.task)
271 elif name == "UnsatisfiedDep":
# name[:-3].lower() turns e.g. "UnsatisfiedDep" into a short suffix word.
272 msg = "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
276 # Only need to output when using 1.8 or lower, the UI code handles it
278 if (int(bb.__version__.split(".")[0]) <= 1 and int(bb.__version__.split(".")[1]) <= 8):
# At BuildStarted: record BB_VERSION and emit the configuration banner
# assembled from BUILDCFG_HEADER and the variables listed in BUILDCFG_VARS.
282 if name.startswith("BuildStarted"):
283 bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
284 statusvars = bb.data.getVar("BUILDCFG_VARS", e.data, 1).split()
285 statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
286 statusmsg = "\n%s\n%s\n" % (bb.data.getVar("BUILDCFG_HEADER", e.data, 1), "\n".join(statuslines))
# Abort early if any variable named in BUILDCFG_NEEDEDVARS is unset/INVALID.
289 needed_vars = bb.data.getVar("BUILDCFG_NEEDEDVARS", e.data, 1).split()
291 for v in needed_vars:
292 val = bb.data.getVar(v, e.data, 1)
293 if not val or val == 'INVALID':
296 bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
299 # Handle removing stamps for 'rebuild' task
301 if name.startswith("StampUpdate"):
302 for (fn, task) in e.targets:
303 #print "%s %s" % (task, fn)
304 if task == "do_rebuild":
# Remove all stamps for the recipe and drop a .needclean marker that
# base_scenefunction picks up to trigger do_clean.
305 dir = "%s.*" % e.stampPrefix[fn]
306 bb.note("Removing stamps: " + dir)
307 os.system('rm -f '+ dir)
308 os.system('touch ' + e.stampPrefix[fn] + '.needclean')
# Append the message to EVENTLOG when the event carries a datastore.
310 if not data in e.__dict__:
313 log = data.getVar("EVENTLOG", e.data, 1)
315 logfile = file(log, "a")
316 logfile.write("%s\n" % msg)
# Configure runs in S and B after unpack/patch, and requires all DEPENDS
# entries to have populated staging first.
322 addtask configure after do_unpack do_patch
323 do_configure[dirs] = "${S} ${B}"
324 do_configure[deptask] = "do_populate_staging"
# NOTE(review): the body and closing brace of base_do_configure are elided
# from this excerpt.
325 base_do_configure() {
329 addtask compile after do_configure
330 do_compile[dirs] = "${S} ${B}"
# NOTE(review): fragment of the base compile function — its header and
# closing lines are elided.  If a Makefile/makefile exists, run oe_runmake;
# otherwise (elided 'else') note that there is nothing to compile.
332 if [ -e Makefile -o -e makefile ]; then
333 oe_runmake || die "make failed"
335 oenote "nothing to compile"
340 addtask install after do_compile
341 do_install[dirs] = "${D} ${S} ${B}"
342 # Remove and re-create ${D} so that is it guaranteed to be empty
343 do_install[cleandirs] = "${D}"
# NOTE(review): fragment of the anonymous python function that runs at
# recipe parse time — its 'python __anonymous () {' header, several branch
# bodies ('continue'/'break'/'return' lines), and the closing brace are
# elided from this excerpt.
# Skip compatibility checks when fetching for a source mirror.
360 source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
361 if not source_mirror_fetch:
# Skip the recipe when COMPATIBLE_HOST / COMPATIBLE_MACHINE regexes do
# not match the current host or machine.
362 need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
365 this_host = bb.data.getVar('HOST_SYS', d, 1)
366 if not re.match(need_host, this_host):
367 raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
369 need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
372 this_machine = bb.data.getVar('MACHINE', d, 1)
373 if this_machine and not re.match(need_machine, this_machine):
374 raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
376 pn = bb.data.getVar('PN', d, 1)
# Promote per-recipe SRCDATE_<pn> / USE_NLS_<pn> overrides to the
# generic variables.
378 # OBSOLETE in bitbake 1.7.4
379 srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
381 bb.data.setVar('SRCDATE', srcdate, d)
383 use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
385 bb.data.setVar('USE_NLS', use_nls, d)
387 # Git packages should DEPEND on git-native
388 srcuri = bb.data.getVar('SRC_URI', d, 1)
389 if "git://" in srcuri:
390 depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
391 depends = depends + " git-native:do_populate_staging"
392 bb.data.setVarFlag('do_fetch', 'depends', depends, d)
394 # 'multimachine' handling
395 mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
396 pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
398 if (pkg_arch == mach_arch):
399 # Already machine specific - nothing further to do
403 # We always try to scan SRC_URI for urls with machine overrides
404 # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
406 override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
# Any file:// entry resolved from a ${MACHINE} subdirectory of
# FILE_DIRNAME makes the whole package machine-specific.
409 for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
410 path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
411 if os.path.isdir(path):
414 for s in srcuri.split():
415 if not s.startswith("file://"):
417 local = bb.data.expand(bb.fetch.localpath(s, d), d)
419 if local.startswith(mp):
420 #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
421 bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
422 bb.data.setVar('MULTIMACH_ARCH', mach_arch, d)
# If any sub-package declares a machine-specific PACKAGE_ARCH_<pkg>,
# mark the multimachine arch accordingly.
427 packages = bb.data.getVar('PACKAGES', d, 1).split()
429 pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
431 # We could look for != PACKAGE_ARCH here but how to choose
432 # if multiple differences are present?
433 # Look through PACKAGE_ARCHS for the priority order?
434 if pkgarch and pkgarch == mach_arch:
435 multiarch = mach_arch
438 bb.data.setVar('MULTIMACH_ARCH', multiarch, d)
# Export the base_* implementations above as the default do_* task
# functions, overridable by inheriting classes/recipes.
441 EXPORT_FUNCTIONS do_setscene do_fetch do_unpack do_configure do_compile do_install do_package