# Default task run when bitbake is invoked without an explicit task.
1 BB_DEFAULT_TASK ?= "build"
# NOTE(review): the two lines below are the tail of the oe_runmake shell
# helper; its function header is not visible in this sampled chunk.
# It logs the make invocation, then runs make with EXTRA_OEMAKE and the
# caller's arguments, dying on failure.
30 oenote ${MAKE} ${EXTRA_OEMAKE} "$@"
31 ${MAKE} ${EXTRA_OEMAKE} "$@" || die "oe_runmake failed"
# Compute the default dependency string prepended to every recipe's
# DEPENDS (see the DEPENDS_prepend assignments that call this).
# NOTE(review): source lines are sampled here (gaps in the numbering);
# some branch bodies and the final return are not visible.
34 def base_dep_prepend(d):
36 # Ideally this will check a flag so we will operate properly in
37 # the case where host == build == target, for now we don't work in
# Every recipe needs the checksum tool and coreutils staged first.
40 deps = "shasum-native coreutils-native"
# The bootstrap recipes must not end up depending on themselves.
41 if bb.data.getVar('PN', d, True) == "shasum-native" or bb.data.getVar('PN', d, True) == "stagemanager-native":
43 if bb.data.getVar('PN', d, True) == "coreutils-native":
44 deps = "shasum-native"
46 # INHIBIT_DEFAULT_DEPS doesn't apply to the patch command. Whether or not
47 # we need that built is the responsibility of the patch function / class, not
# Cross builds (host != build) additionally need a target toolchain
# and C library, unless the recipe sets INHIBIT_DEFAULT_DEPS.
49 if not bb.data.getVar('INHIBIT_DEFAULT_DEPS', d):
50 if (bb.data.getVar('HOST_SYS', d, 1) !=
51 bb.data.getVar('BUILD_SYS', d, 1)):
52 deps += " virtual/${TARGET_PREFIX}gcc virtual/libc "
# Prepend the computed bootstrap dependencies for target, native and
# nativesdk recipe variants alike.
55 DEPENDS_prepend="${@base_dep_prepend(d)} "
56 DEPENDS_virtclass-native_prepend="${@base_dep_prepend(d)} "
57 DEPENDS_virtclass-nativesdk_prepend="${@base_dep_prepend(d)} "
# Functions executed by do_setscene; other classes may append to this.
60 SCENEFUNCS += "base_scenefunction"
# Setscene helper: if a previous rebuild flagged this recipe's stamps
# with a ".needclean" marker (written by the StampUpdate handler below),
# run do_clean before anything else.
# NOTE(review): the function's closing brace is not visible in this
# sampled chunk.
62 python base_scenefunction () {
63 stamp = bb.data.getVar('STAMP', d, 1) + ".needclean"
64 if os.path.exists(stamp):
65 bb.build.exec_func("do_clean", d)
# Run every function listed in SCENEFUNCS, then create the do_setscene
# stamp if it does not already exist.
68 python base_do_setscene () {
69 for f in (bb.data.getVar('SCENEFUNCS', d, 1) or '').split():
70 bb.build.exec_func(f, d)
71 if not os.path.exists(bb.data.getVar('STAMP', d, 1) + ".do_setscene"):
72 bb.build.make_stamp("do_setscene", d)
# selfstamp: the function manages its own stamp file (created above).
74 do_setscene[selfstamp] = "1"
75 addtask setscene before do_fetch
# do_fetch runs in DL_DIR and needs shasum-native staged first so the
# checksum verification below can run.
78 do_fetch[dirs] = "${DL_DIR}"
79 do_fetch[depends] = "shasum-native:do_populate_staging"
# Fetch all SRC_URI entries, then verify remote archives against the
# checksums recorded in conf/checksums.ini files along BBPATH.
# NOTE(review): source lines are sampled (gaps in numbering); several
# try:/else: lines of the original are not visible here.
80 python base_do_fetch() {
# Work on an expanded copy so overrides apply without touching d.
83 localdata = bb.data.createCopy(d)
84 bb.data.update_data(localdata)
86 src_uri = bb.data.getVar('SRC_URI', localdata, 1)
# Initialise the fetcher; map fetcher errors onto task failures.
91 bb.fetch.init(src_uri.split(),d)
92 except bb.fetch.NoMethodError:
93 (type, value, traceback) = sys.exc_info()
94 raise bb.build.FuncFailed("No method: %s" % value)
95 except bb.MalformedUrl:
96 (type, value, traceback) = sys.exc_info()
97 raise bb.build.FuncFailed("Malformed URL: %s" % value)
# Perform the actual download.
100 bb.fetch.go(localdata)
101 except bb.fetch.MissingParameterError:
102 (type, value, traceback) = sys.exc_info()
103 raise bb.build.FuncFailed("Missing parameters: %s" % value)
104 except bb.fetch.FetchError:
105 (type, value, traceback) = sys.exc_info()
106 raise bb.build.FuncFailed("Fetch failed: %s" % value)
107 except bb.fetch.MD5SumError:
108 (type, value, traceback) = sys.exc_info()
109 raise bb.build.FuncFailed("MD5 failed: %s" % value)
111 (type, value, traceback) = sys.exc_info()
112 raise bb.build.FuncFailed("Unknown fetch Error: %s" % value)
115 # Verify the SHA and MD5 sums we have in OE and check what do
117 checksum_paths = bb.data.getVar('BBPATH', d, True).split(":")
119 # reverse the list to give precedence to directories that
120 # appear first in BBPATH
121 checksum_paths.reverse()
# One candidate checksums.ini per BBPATH component.
123 checksum_files = ["%s/conf/checksums.ini" % path for path in checksum_paths]
125 parser = base_chk_load_parser(checksum_files)
127 bb.note("No conf/checksums.ini found, not checking checksums")
130 bb.note("Creating the CheckSum parser failed: %s:%s" % (sys.exc_info()[0], sys.exc_info()[1]))
133 pv = bb.data.getVar('PV', d, True)
134 pn = bb.data.getVar('PN', d, True)
# Check each fetched archive against the recorded checksums.
138 for url in src_uri.split():
139 localpath = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
140 (type,host,path,_,_,params) = bb.decodeurl(url)
141 uri = "%s://%s%s" % (type,host,path)
# Only remote transports carry checksums worth verifying.
143 if type in [ "http", "https", "ftp", "ftps" ]:
144 # We provide a default shortcut of plain [] for the first fetch uri
145 # Explicit names in any uri overrides this default.
146 if not "name" in params and first_uri:
# A missing checksum is fatal unless OE_ALLOW_INSECURE_DOWNLOADS is
# set; a mismatching checksum is always a task failure.
149 if not (base_chk_file_vars(parser, localpath, params, d) or base_chk_file(parser, pn, pv,uri, localpath, d)):
150 if not bb.data.getVar("OE_ALLOW_INSECURE_DOWNLOADS", d, True):
151 bb.fatal("%s-%s: %s has no checksum defined, cannot check archive integrity" % (pn,pv,uri))
153 bb.note("%s-%s: %s has no checksum defined, archive integrity not checked" % (pn,pv,uri))
155 raise bb.build.FuncFailed("Checksum of '%s' failed" % uri)
# Unpack a single fetched file into the current working directory,
# selecting the extraction command from the file extension.
# NOTE(review): source lines are sampled (gaps in numbering); some
# branches, the else: lines and the final return are not visible here.
158 def oe_unpack_file(file, data, url = None):
# Synthesise a file:// url when the caller did not provide one.
161 url = "file://%s" % file
# efile: output name for plain compressed (non-tar) files — the
# basename with the final .gz/.bz2/.Z extension stripped, in WORKDIR.
162 dots = file.split(".")
163 if dots[-1] in ['gz', 'bz2', 'Z']:
164 efile = os.path.join(bb.data.getVar('WORKDIR', data, 1),os.path.basename('.'.join(dots[0:-1])))
# Pick the unpack command by extension; --no-same-owner avoids chown
# attempts when extracting tarballs as a non-root user.
168 if file.endswith('.tar'):
169 cmd = 'tar x --no-same-owner -f %s' % file
170 elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
171 cmd = 'tar xz --no-same-owner -f %s' % file
172 elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
173 cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
174 elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
175 cmd = 'gzip -dc %s > %s' % (file, efile)
176 elif file.endswith('.bz2'):
177 cmd = 'bzip2 -dc %s > %s' % (file, efile)
178 elif file.endswith('.tar.xz'):
179 cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
180 elif file.endswith('.xz'):
181 cmd = 'xz -dc %s > %s' % (file, efile)
# Zip/jar extraction consults url parameters (decoded below).
182 elif file.endswith('.zip') or file.endswith('.jar'):
184 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
187 cmd = "%s '%s'" % (cmd, file)
# Directories resolved from FILESPATH are copied recursively,
# preserving the sub-path relative to the matching FILESPATH entry.
188 elif os.path.isdir(file):
190 filespath = bb.data.getVar("FILESPATH", data, 1).split(":")
192 if file[0:len(fp)] == fp:
193 destdir = file[len(fp):file.rfind('/')]
194 destdir = destdir.strip('/')
197 elif not os.access("%s/%s" % (os.getcwd(), destdir), os.F_OK):
198 os.makedirs("%s/%s" % (os.getcwd(), destdir))
201 cmd = 'cp -pPR %s %s/%s/' % (file, os.getcwd(), destdir)
# Anything else: plain file copy, unless the url marks it as a patch
# (patches are handled by the patch machinery, not unpacked here).
203 (type, host, path, user, pswd, parm) = bb.decodeurl(url)
204 if not 'patch' in parm:
205 # The "destdir" handling was specifically done for FILESPATH
206 # items. So, only do so for file:// entries.
208 destdir = bb.decodeurl(url)[1] or "."
211 bb.mkdirhier("%s/%s" % (os.getcwd(), destdir))
212 cmd = 'cp %s %s/%s/' % (file, os.getcwd(), destdir)
# Avoid copying a file onto itself.
217 dest = os.path.join(os.getcwd(), os.path.basename(file))
218 if os.path.exists(dest):
219 if os.path.samefile(file, dest):
222 # Change to subdir before executing command
223 save_cwd = os.getcwd();
224 parm = bb.decodeurl(url)[5]
226 newdir = ("%s/%s" % (os.getcwd(), parm['subdir']))
# Run the chosen command through a shell with the configured PATH.
230 cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, 1), cmd)
231 bb.note("Unpacking %s to %s/" % (base_path_out(file, data), base_path_out(os.getcwd(), data)))
232 ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
# do_unpack: unpack every SRC_URI entry into WORKDIR via oe_unpack_file.
# NOTE(review): sampled lines — the try: lines and the ret check's
# condition are not visible in this chunk.
238 addtask unpack after do_fetch
239 do_unpack[dirs] = "${WORKDIR}"
240 python base_do_unpack() {
# Expanded copy so overrides apply without mutating d.
243 localdata = bb.data.createCopy(d)
244 bb.data.update_data(localdata)
246 src_uri = bb.data.getVar('SRC_URI', localdata)
249 src_uri = bb.data.expand(src_uri, localdata)
250 for url in src_uri.split():
# Resolve the fetched local path; failures abort the whole task.
252 local = bb.data.expand(bb.fetch.localpath(url, localdata), localdata)
253 except bb.MalformedUrl, e:
254 raise bb.build.FuncFailed('Unable to generate local path for malformed uri: %s' % e)
256 raise bb.build.FuncFailed('Unable to locate local file for %s' % url)
257 local = os.path.realpath(local)
258 ret = oe_unpack_file(local, localdata, url)
260 raise bb.build.FuncFailed()
# Generic event handler: builds log messages for task/dependency events,
# prints the build-configuration banner on BuildStarted (failing early
# on unset required variables), removes stamps for the 'rebuild' task,
# and appends messages to an optional EVENTLOG file.
# NOTE(review): sampled lines — several branches and the handler's
# return are not visible in this chunk.
263 addhandler base_eventhandler
264 python base_eventhandler() {
265 from bb import note, error, data
266 from bb.event import Handled, NotHandled, getName
270 if name == "TaskCompleted":
271 msg = "package %s: task %s is complete." % (data.getVar("PF", e.data, 1), e.task)
272 elif name == "UnsatisfiedDep":
# name[:-3] strips the trailing "Dep" from the event class name.
273 msg = "package %s: dependency %s %s" % (e.pkg, e.dep, name[:-3].lower())
277 # Only need to output when using 1.8 or lower, the UI code handles it
279 if (int(bb.__version__.split(".")[0]) <= 1 and int(bb.__version__.split(".")[1]) <= 8):
# On BuildStarted: record BB_VERSION, format the BUILDCFG banner, and
# abort if any BUILDCFG_NEEDEDVARS variable is unset or 'INVALID'.
283 if name.startswith("BuildStarted"):
284 bb.data.setVar( 'BB_VERSION', bb.__version__, e.data )
285 statusvars = bb.data.getVar("BUILDCFG_VARS", e.data, 1).split()
286 statuslines = ["%-17s = \"%s\"" % (i, bb.data.getVar(i, e.data, 1) or '') for i in statusvars]
287 statusmsg = "\n%s\n%s\n" % (bb.data.getVar("BUILDCFG_HEADER", e.data, 1), "\n".join(statuslines))
290 needed_vars = bb.data.getVar("BUILDCFG_NEEDEDVARS", e.data, 1).split()
292 for v in needed_vars:
293 val = bb.data.getVar(v, e.data, 1)
294 if not val or val == 'INVALID':
297 bb.fatal('The following variable(s) were not set: %s\nPlease set them directly, or choose a MACHINE or DISTRO that sets them.' % ', '.join(pesteruser))
300 # Handle removing stamps for 'rebuild' task
302 if name.startswith("StampUpdate"):
303 for (fn, task) in e.targets:
304 #print "%s %s" % (task, fn)
305 if task == "do_rebuild":
# Remove all stamps for this recipe and flag a clean for the next
# setscene run (the ".needclean" marker read by base_scenefunction).
306 dir = "%s.*" % e.stampPrefix[fn]
307 bb.note("Removing stamps: " + dir)
308 os.system('rm -f '+ dir)
309 os.system('touch ' + e.stampPrefix[fn] + '.needclean')
# Append the message to the optional EVENTLOG file, if configured.
311 if not data in e.__dict__:
314 log = data.getVar("EVENTLOG", e.data, 1)
316 logfile = file(log, "a")
317 logfile.write("%s\n" % msg)
# Default configure/compile/install task skeletons.
# NOTE(review): sampled lines — the base_do_configure body and the
# base_do_compile function header are not visible in this chunk.
323 addtask configure after do_unpack do_patch
324 do_configure[dirs] = "${S} ${B}"
# Everything this recipe depends on must be staged before configuring.
325 do_configure[deptask] = "do_populate_staging"
326 base_do_configure() {
330 addtask compile after do_configure
331 do_compile[dirs] = "${S} ${B}"
# Default compile: run make only when a makefile exists, otherwise note
# that there is nothing to build.
333 if [ -e Makefile -o -e makefile ]; then
334 oe_runmake || die "make failed"
336 oenote "nothing to compile"
341 addtask install after do_compile
342 do_install[dirs] = "${D} ${S} ${B}"
343 # Remove and re-create ${D} so that is it guaranteed to be empty
344 do_install[cleandirs] = "${D}"
# Parse-time fixups (body of an anonymous python function whose header
# is not visible in this sampled chunk): COMPATIBLE_HOST/MACHINE
# checks, per-recipe SRCDATE/USE_NLS overrides, extra fetch/unpack tool
# dependencies, and 'multimachine' PACKAGE_ARCH handling.
361 source_mirror_fetch = bb.data.getVar('SOURCE_MIRROR_FETCH', d, 0)
# Compatibility checks are skipped when fetching for a source mirror.
362 if not source_mirror_fetch:
363 need_host = bb.data.getVar('COMPATIBLE_HOST', d, 1)
366 this_host = bb.data.getVar('HOST_SYS', d, 1)
367 if not re.match(need_host, this_host):
368 raise bb.parse.SkipPackage("incompatible with host %s" % this_host)
370 need_machine = bb.data.getVar('COMPATIBLE_MACHINE', d, 1)
373 this_machine = bb.data.getVar('MACHINE', d, 1)
374 if this_machine and not re.match(need_machine, this_machine):
375 raise bb.parse.SkipPackage("incompatible with machine %s" % this_machine)
377 pn = bb.data.getVar('PN', d, 1)
# Per-recipe overrides: SRCDATE_<pn> and USE_NLS_<pn> replace the
# generic variables when set.
379 # OBSOLETE in bitbake 1.7.4
380 srcdate = bb.data.getVar('SRCDATE_%s' % pn, d, 1)
382 bb.data.setVar('SRCDATE', srcdate, d)
384 use_nls = bb.data.getVar('USE_NLS_%s' % pn, d, 1)
386 bb.data.setVar('USE_NLS', use_nls, d)
388 # Git packages should DEPEND on git-native
389 srcuri = bb.data.getVar('SRC_URI', d, 1)
390 if "git://" in srcuri:
391 depends = bb.data.getVarFlag('do_fetch', 'depends', d) or ""
392 depends = depends + " git-native:do_populate_staging"
393 bb.data.setVarFlag('do_fetch', 'depends', depends, d)
395 # unzip-native should already be staged before unpacking ZIP recipes
396 need_unzip = bb.data.getVar('NEED_UNZIP_FOR_UNPACK', d, 1)
397 src_uri = bb.data.getVar('SRC_URI', d, 1)
399 if ".zip" in src_uri or need_unzip == "1":
400 depends = bb.data.getVarFlag('do_unpack', 'depends', d) or ""
401 depends = depends + " unzip-native:do_populate_staging"
402 bb.data.setVarFlag('do_unpack', 'depends', depends, d)
404 # 'multimachine' handling
405 mach_arch = bb.data.getVar('MACHINE_ARCH', d, 1)
406 pkg_arch = bb.data.getVar('PACKAGE_ARCH', d, 1)
408 if (pkg_arch == mach_arch):
409 # Already machine specific - nothing further to do
413 # We always try to scan SRC_URI for urls with machine overrides
414 # unless the package sets SRC_URI_OVERRIDES_PACKAGE_ARCH=0
416 override = bb.data.getVar('SRC_URI_OVERRIDES_PACKAGE_ARCH', d, 1)
# Collect the machine-specific file directories to test against.
419 for p in [ "${PF}", "${P}", "${PN}", "files", "" ]:
420 path = bb.data.expand(os.path.join("${FILE_DIRNAME}", p, "${MACHINE}"), d)
421 if os.path.isdir(path):
# Any file:// entry that resolves into a machine directory makes the
# whole recipe machine-specific.
424 for s in srcuri.split():
425 if not s.startswith("file://"):
427 local = bb.data.expand(bb.fetch.localpath(s, d), d)
429 if local.startswith(mp):
430 #bb.note("overriding PACKAGE_ARCH from %s to %s" % (pkg_arch, mach_arch))
431 bb.data.setVar('PACKAGE_ARCH', "${MACHINE_ARCH}", d)
432 bb.data.setVar('MULTIMACH_ARCH', mach_arch, d)
# If any sub-package declares a machine-specific PACKAGE_ARCH_<pkg>,
# record that in MULTIMACH_ARCH.
437 packages = bb.data.getVar('PACKAGES', d, 1).split()
439 pkgarch = bb.data.getVar("PACKAGE_ARCH_%s" % pkg, d, 1)
441 # We could look for != PACKAGE_ARCH here but how to choose
442 # if multiple differences are present?
443 # Look through PACKAGE_ARCHS for the priority order?
444 if pkgarch and pkgarch == mach_arch:
445 multiarch = mach_arch
448 bb.data.setVar('MULTIMACH_ARCH', multiarch, d)
# Export the base_* implementations so recipes inheriting this class
# get them as their default task functions (overridable per-recipe).
451 EXPORT_FUNCTIONS do_setscene do_fetch do_unpack do_configure do_compile do_install do_package