# Part of the A-A-P recipe executive: build the specified targets.

# Copyright (C) 2002 Stichting NLnet Labs
# Permission to copy and use this file is specified in the file COPYING.
# If this file is missing you can find it here: http://www.a-a-p.org/COPYING

import sys
import os
import os.path
import string

import Global
from Node import Node
from Error import *
from Dictlist import dictlist2str, var2dictlist, string2dictlist
from Process import Process
from ParsePos import ParsePos
from RecPos import rpcopy, RecPos
from Util import *
from Depend import Depend, depend_auto
from Remote import url_time
from VersCont import refresh_node
from Cache import local_name
from Sign import get_old_sign, get_new_sign, sign_clear, sign_clear_target
from Sign import sign_updated, buildcheckstr2sign, buildcheck_updated
from Commands import aap_eval
from Message import *
from DoRead import recipe_dir
from Filetype import ft_detect
from Work import setrpstack, getwork


def dictlist2shellstr(list, key):
    """Turn a dictlist into a string that can be used as an argument for a
       shell command.
       TODO: let this depend on the type of shell used.
       """
    result = ''
    for i in list:
	if result:
	    result = result + ' '
	for c in i[key]:
	    if c in "'\" \t":
		result = result + "\\"
	    result = result + c
    return result
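
# A small usage sketch for dictlist2shellstr() (assuming a dictlist of the
# usual [{"name": ...}, ...] form; the exact escaping is shell dependent,
# see the TODO above):
#
#   dictlist2shellstr([{"name": "my file.c"}, {"name": 'a"b'}], "name")
#   # -> my\ file.c a\"b    (quotes, spaces and tabs get a backslash)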


def exec_commands(dep, globals, target):
    """Execute the build commands "dep.commands" to build the Node "target",
       using the info in the Depend object "dep"."""
    work = getwork(globals)

    # Go to the build directory.  recipe_dir() only uses the directory part
    # of its argument, thus the appended file name is a dummy.
    recipe_dir(os.path.join(dep.builddir, "monty"))

    # Make a copy of the globals, so that the commands cannot modify them.
    # Set the variables $target, $source and $match below.
    new_globals = globals.copy()
    new_globals["exports"] = {}
    new_globals["buildtarget"] = target.get_name()
    dl = shorten_dictlist(dep.targetlist)
    work.add_node_attributes(dl)
    xp = Expand(1, Expand.quote_aap)
    new_globals["target"] = dictlist2str(dl, xp)
    new_globals["target_list"] = map(lambda x : x["name"], dl)
    new_globals["target_dl"] = dl

    sl = shorten_dictlist(dep.sourcelist)
    work.add_node_attributes(sl)
    new_globals["source"] = dictlist2str(sl, xp)
    new_globals["source_list"] = map(lambda x : x["name"], sl)
    new_globals["source_dl"] = sl
    new_globals["match"] = dep.matchstr

    # Create a ParsePos object to contain the parse position in the string.
    # Make a copy of the RecPos stack, so that the item on top can be changed.
    # Set the line number to just before where the commands were defined.
    fp = ParsePos(rpcopy(dep.rpstack, dep.rpstack[-1].line_nr - 1),
							 string = dep.commands)

    #
    # Parse and execute the commands.
    #
    Process(fp, new_globals)

    # Move the exported variables to the globals of the current recipe
    exports = new_globals["exports"]
    for e in exports.keys():
	globals[e] = exports[e]


def check_name(itemdict):
    """Return the check name to be used for item with dictlist "itemdict"."""
    if itemdict.has_key("check"):
	check = itemdict["check"]
    # TODO: make mapping from name or filetype to check configurable
    else:
	if itemdict.has_key("filetype"):
	    type = itemdict["filetype"]
	else:
	    type = ft_detect(itemdict["name"])
	if type == "c" or type == "cpp":
	    check = "c_md5"	# default check for C and C++ code
	elif ((itemdict.has_key("directory") and itemdict["directory"])
		or os.path.isdir(itemdict["name"])):
	    check = "none"	# default check for directories: none
	else:
	    check = "md5"	# default check is md5
    return check
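
# Sketch of the default mapping in check_name() (assuming ft_detect()
# recognizes the usual filetypes; an explicit "check" attribute always wins):
#
#   check_name({"name": "foo.c"})                       # -> "c_md5"
#   check_name({"name": "include", "directory": 1})     # -> "none"
#   check_name({"name": "notes.txt"})                   # -> "md5"
#   check_name({"name": "logo.jpg", "check": "newer"})  # -> "newer"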


def remove_bdir(globals, name):
    """When "name" starts with $BDIR return what follows.
       Otherwise return None.
       "name" must be an absolute path."""
    bd = os.path.abspath(get_var_val(0, globals, "BDIR"))
    if bd[-1] != '/':
	bd = bd + '/'
    bd_len = len(bd)
    if len(name) > bd_len and name[:bd_len] == bd:
	return name[bd_len:]
    return None
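
# Sketch of remove_bdir() (assuming $BDIR expands to "/home/user/build"):
#
#   remove_bdir(globals, "/home/user/build/foo.o")    # -> "foo.o"
#   remove_bdir(globals, "/usr/include/stdio.h")      # -> None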


class Update:
    """Objects used to remember which source of a dependencey is outdated and
    the newest time for "newer"."""
    def __init__(self):
	self.forced = 0		    # {force} used on target or source
	self.time = 0
	self.time_source = ''
	self.source = ''
	self.buildcheck = ''	    # buildcheck signature

    def outdated(self):
	"""Return TRUE if we already know that updating is required.
	   This doesn't handle a source being newer than the target."""
	return self.forced or self.time or self.source
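
# Typical life cycle of an Update object (sketch; see target_update() below):
#
#   update = Update()
#   dictlist_update(dep.sourcelist, work, target, update, 0)
#   # update.forced, update.source and update.time now record whether and
#   # why "target" is outdated; may_exec_depend() uses them to decide
#   # whether the build commands must be run.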


def dictlist_update(dictlist, work, target, update, rule, autodepend = 1):
    """Go over all items in dictlist "dictlist" and update them.
       Skip the "target" node (avoid files that depend on itself).
       Find out if an item is outdated, using "update".
       When "rule" is non-zero, use $SRCPATH.
       When "autodepend" is zero don't check for automatic dependencies.
       Returns the dictlist of items that have been updated.  This is
       "dictlist" with automatic dependencies added."""
    # Make a copy of the dictlist, it may be extended with automatic
    # dependencies.
    retlist = dictlist[:]

    for src_dict in dictlist:
	# For an absolute path use the source name literally, otherwise use
	# $SRCPATH to locate the source.
	# However, need to add the directory for a child recipe.
	src = src_dict["name"]
	if not os.path.isabs(src) and src_dict.has_key("_node"):
	    src = src_dict["_node"].short_name()

	# Remove "./" things (from "." in $SRCPATH)
	src = os.path.normpath(src)

	from Remote import is_url

	def expand_srcpath(globals, isabs, name, dict):
	    """Expand "name" into a list of names using $SRCPATH.  The
	       "srcpath" attribute in "dict" overrules $SRCPATH."""
	    if not isabs:
		if dict.has_key("srcpath"):
		    srcpath = string2dictlist([], dict["srcpath"])
		else:
		    srcpath = var2dictlist(globals, "SRCPATH")
	    if isabs or not srcpath:
		names = [ name ]
	    else:
		names = []
		for i in srcpath:
		    names.append(os.path.join(i["name"], name))
	    return names

	names = expand_srcpath(work.globals,
			    (os.path.isabs(src) or is_url(src)), src, src_dict)
	if rule:
	    # When "src" starts with "$BDIR/" also try without it.
	    src_no_bdir = remove_bdir(work.globals, os.path.abspath(src))
	    if src_no_bdir:
		names.extend(expand_srcpath(work.globals, 0,
							src_no_bdir, src_dict))

	# Loop over all possible locations for this "src".
	done = 0
	for src_name in names:
	    # Remove "./" things (from $SRCPATH)
	    src_name = os.path.normpath(src_name)

	    node = work.get_node(src_name, 1, src_dict)

	    # Ignore a source that is the target itself.
	    if node == target:
		msg_depend(_('Target depends on itself (ignored): "%s"')
							 % target.short_name())
		done = 2
		break

	    # Try updating the item.  If it works we use it.
	    if target_update(work, node):
		done = 1
		# TRICK: fix the dependency to use this node
		src_dict["name"] = node.get_name()
		src_dict["_node"] = node
		break
	
	if done == 2:
	    continue	# skip source equal to target

	if not done:
	    raise UserError, (_('Do not know how to build "%s"')
							   % shorten_name(src))

	# Find automatic dependencies for this item.  If there are any, also
	# update them (recursively!) and append them to the return value (so
	# that signatures are stored when building succeeds).
	# Don't do this if "autodepend" is zero (items were generated by a
	# recursive autodepend).
	# Do this on the global node, not the item in the dictlist, to re-use
	# dependencies also used in other dictlists.
	# Avoid endless recursion by setting did_auto_depend.
	if autodepend and not node.did_auto_depend:
	    depend_auto(work, node)
	    if node.auto_depend:
		node.did_auto_depend = 1
		retlist.extend(dictlist_update(node.auto_depend, work,
						     target, update, rule,
					autodepend = not node.auto_depend_rec))
		node.did_auto_depend = 0

	# Check if the target needs updating because item "src_dict" is
	# outdated.
	check_need_update(work, update, src_dict, target)

    # Return the list of items that were updated.
    return retlist
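
# Note on automatic dependencies: when e.g. "foo.c" includes "foo.h",
# depend_auto() typically adds "foo.h" to node.auto_depend, so the list
# returned by dictlist_update() contains both items and both get their
# signatures stored after a successful build (see may_exec_depend()).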


def check_need_update(work, update, src_dict, target):
    """Check if node "target" needs to be updated by checking the source
       "src_dict".
       "update" is the Update object used for the target."""
    # Only need to do the check if not updating already.  Saves a bit of time
    # in computing signatures (although they might have to be computed anyway
    # to be able to remember them).
    if not update.outdated():
	if src_dict.has_key("_node"):
	    src_name = src_dict["_node"].get_name()
	else:
	    src_name = src_dict["name"]

	# The "force" attribute forces updating always.
	if src_dict.has_key("force") and src_dict["force"]:
	    update.forced = 1
	    update.source = src_name
	else:
	    check = check_name(src_dict)
	    if check == "newer":
		c = "time"		# "newer" check also uses timestamp
	    else:
		c = check
	    old = get_old_sign(src_name, c, target)
	    new = get_new_sign(work.globals, src_name, c)
	    if new == '' or new == '0':
		raise UserError, _('Building failed for "%s"') % src_name

	    # Update update.time or update.source:
	    # For the "newer" check we need to find the newest timestamp.
	    # For other checks building is to be done if the sign differs.
	    if check == "newer":
		t = long(new)
		if update.time == 0 or t > update.time:
		    update.time = t
		    update.time_source = src_name
	    elif new != old:
		update.source = src_name
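
# Sketch of what check_need_update() records in "update":
#
#   update = Update()
#   check_need_update(work, update, {"name": "foo.c"}, target)
#   # with check "md5"/"c_md5": update.source is set to "foo.c" when the
#   #     signature changed since the last build of "target"
#   # with check "newer": only update.time is set to the newest timestamp;
#   #     the comparison against the target happens in may_exec_depend()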


def buildcheck_update(checkstring, dep, work, target, update):
    """Check if the "checkstring" value changed since the last build and
       change "update" accordingly.  "dep.commands" are the commands for the
       build rule."""
    line_nr = dep.rpstack[-1].line_nr
    setrpstack(work.globals, dep.rpstack)

    # If the "buildcheck" attribute is empty, don't check if it changed.
    i = skip_white(checkstring, 0)
    if i >= len(checkstring):
	update.buildcheck = ''
	return

    # Always compute the check signature, it either needs to be compared (not
    # outdated yet) or stored (when outdated already).
    # Only set the value of $xcommands when it's used, it takes some time and
    # may fail.
    work.globals["commands"] = dep.commands
    xp = Expand(0, Expand.quote_aap, skip_errors = 1)
    if string.find(checkstring, "xcommands") > 0:
	work.globals["xcommands"] = aap_eval(line_nr, work.globals,
					     dep.commands, xp, skip_errors = 1)
    update.buildcheck = buildcheckstr2sign(aap_eval(line_nr, work.globals,
					     checkstring, xp, skip_errors = 1))
    del work.globals["commands"]
    if work.globals.has_key("xcommands"):
	del work.globals["xcommands"]

    # Only need to do the check if not updating already.
    if not update.outdated():
	old = get_old_sign("", "buildcheck", target)

	# building is to be done if the signature differs.
	if update.buildcheck != old:
	    update.source = "buildcheck"


def target_rule(work, target, update, src_list_list):
    """Find rules that match "target" and do the work for each of them.
       "src_list_list" is extended for items that need their sign updated.
       Returns a dependency made out of a rule with build commands or None
    """
    #
    # No specific dependency is defined for this target:
    # - Find all matching rules and update their sources.  Remember the
    #   newest timestamp.
    # - Find a matching rule with commands and create a dependency from
    #   that.
    #
    target_name = target.get_name()
    target_sname = target.short_name()
    dep = None

    buildrule = None	# Fully matching build rule (all its sources exist).
    buildrule_len = 0	# Length of the matching pattern.
    buildrule_p = None	# Potential build rule, used when there is no
			# full match.
    buildrule_p_len = 0	# Length of the matching pattern of buildrule_p.
    double = 0		# Set when more than one fully matching rule found.
    double_p = 0	# Set when more than one potential rule found.

    for r in work.rules:
	matchstr, dir, matchlen = r.match_target(target_name, target_sname)
	if matchstr:
	    src_list = r.target2sourcelist(target_name, target_sname)
	    # Skip rules where the target and source are equal.
	    # Happens for ":rule %.jpg : path/%.jpg".
	    if len(src_list) == 1 and src_list[0]["name"] == target_name:
		continue

	    # Check if all the sources exist.
	    # When not, only remember it as a potential buildrule.
	    full_match = 1
	    for s in src_list:
		if not os.path.exists(s["name"]):
		    # TODO: Should check if this source can be built from
		    # other rules or dependencies.
		    # TODO: remote files
		    full_match = 0
		    break

	    if r.commands:
		# Remember the rule with the longest matching pattern.
		if full_match:
		    if buildrule and matchlen == buildrule_len:
			double = 1
		    if matchlen > buildrule_len:
			buildrule = r
			buildrule_len = matchlen
			double = 0
		else:
		    if buildrule_p and matchlen == buildrule_p_len:
			double_p = 1
		    if matchlen > buildrule_p_len:
			buildrule_p = r
			buildrule_p_len = matchlen
			double_p = 0
	    elif full_match:
		# Matching rule but no commands: only update its sources.
		msg_depend(_('Using rule "%s : %s" for target "%s"')
				% (dictlist2str(r.targetlist,
					      Expand(0, Expand.quote_aap)),
				   dictlist2str(r.sourcelist,
					      Expand(0, Expand.quote_aap)),
				   target_sname))
		target.current_rule = r
		slist = dictlist_update(src_list, work, target, update, 1)
		target.current_rule = None
		src_list_list.append(slist)

    # Give an error when more than one fully matching rule was found or when
    # there is no matching rule and more than one potential rule was found.
    if double or (not buildrule and double_p):
	raise UserError, (_('More than one matching build rule for "%s"')
								% target_sname)
    if not buildrule and buildrule_p:
	# Didn't find a fully matching rule, use the potential one.
	buildrule = buildrule_p

    if buildrule:
	msg_depend(_('Using build rule "%s : %s" for target "%s"')
			% (dictlist2str(buildrule.targetlist,
					      Expand(0, Expand.quote_aap)),
			   dictlist2str(buildrule.sourcelist,
					      Expand(0, Expand.quote_aap)),
			   target_sname))

	# Create a dependency to be executed below.
	src_list = buildrule.target2sourcelist(target_name, target_sname)
	dep = Depend(buildrule.target2targetlist(target_name, target_sname),
			buildrule.build_attr,
			src_list,
			work,
			buildrule.rpstack,
			buildrule.commands, buildrule.builddir)
	dep.matchstr, dir, matchlen = buildrule.match_target(target_name,
							      target_sname)

	# Apply attributes from the rule to the target.  Must be before
	# dictlist_update(), because the "virtual" attribute may influence
	# what happens there (e.g., location of sign file).
	for d in dep.targetlist:
	    if d["name"] == target_name:
		target.set_attributes(d)

	# Update sources for this build rule.
	target.current_rule = buildrule
	slist = dictlist_update(src_list, work, target, update, 1)
	target.current_rule = None
	src_list_list.append(slist)

    return dep
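
# Example of the kind of rule target_rule() matches (recipe sketch, the
# commands are only an illustration):
#
#   :rule %.o : %.c
#       :sys $CC -c $source -o $target
#
# For target "foo.o" the pattern yields source "foo.c"; a matching rule with
# commands becomes the returned Depend, a rule without commands only gets
# its sources updated.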


def target_update(work, target, toplevel = 0):
    """Update a target by finding the build rule for it and executing the build
       commands if it's outdated.
       "target" is a Node object.
       If "toplevel" is non-zero it is an error not to have build commands.
       Return a non-zero number for success."""
    retval = 1

    # The "comment" target is a special case.
    if target.name == "comment":
	work.print_comments()
	return retval

    # Return right away if this target was already updated.
    if target.status is Node.updated:
	msg_depend(_('Target was already updated: "%s"') % target.short_name())
	return retval

    msg_depend(_('updating target "%s"') % target.short_name())

    # Check if we are trying to recursively update ourselves.
    # TODO: can this be solved or ignored?
    if target.status is Node.busy:
	if target.current_rule:
	    rpstack = target.current_rule.rpstack
	    where = ' from rule'
	elif target.current_dep:
	    rpstack = target.current_dep.rpstack
	    where = ' from dependency'
	else:
	    where = ''
	if where:
	    where = where + (_(' in recipe "%s" line %d')
				     % (rpstack[-1].name, rpstack[-1].line_nr))
	raise UserError, (_('Cyclic dependency for "%s"%s')
						% (target.short_name(), where))

    target.status = Node.busy
    target_name = target.get_name()
    target_sname = target.short_name()

    # save "virtual" attribute, it is restored after done with this dependency.
    save_virtual = target.get_virtual()

    # Use an Update object to remember whether any of the sources for this
    # target is outdated, in which case the target needs to be built.
    # For the "newer" check we need to find the newest timestamp of the
    # sources, update.time is set to the timestamp of the item.
    # For other checks we compare with a previous signature.  If outdated,
    # update.source is set to the name of the outdated source.
    update = Update()
    if target.attributes.has_key("force") and target.attributes["force"]:
	update.forced = 1

    # Remember which lists of sources need to have their signatures updated
    # when building succeeds.
    src_list_list = []

    # Go over all specified dependencies that have this target in their target
    # list and update their sources.  Also find the newest timestamp.
    for d in target.get_dependencies():
	msg_depend(_('Using dependency "%s : %s" for target "%s"')
			    % (dictlist2str(d.targetlist,
						  Expand(0, Expand.quote_aap)),
			       dictlist2str(d.sourcelist,
						  Expand(0, Expand.quote_aap)),
			       target_sname))
	target.current_dep = d
	slist = dictlist_update(d.sourcelist, work, target, update, 0)
	target.current_dep = None
	src_list_list.append(slist)

    # If there is a dependency with commands, rules are not applied.
    dep = target.get_first_build_dependency()
    dirmode = target.isdir()

    if not dep and not dirmode:
	# Find matching rules.  One with build commands is turned into a
	# dependency.
	dep = target_rule(work, target, update, src_list_list)
	if not dep:
	    # If the target doesn't exist and has the "refresh" attribute, try
	    # refreshing it.  This may use a cached file.
	    if (not os.path.exists(target_name)
				     and target.attributes.has_key("refresh")):
		if target.current_rule:
		    rpstack = target.current_rule.rpstack
		elif target.current_dep:
		    rpstack = target.current_dep.rpstack
		else:
		    rpstack = []
		refresh_node(rpstack, work.globals, target, 1)
	    if target.get_virtual():
		msg_depend(_('Virtual target has no build commands: "%s"') \
								% target_sname)
	    elif toplevel or not os.path.exists(target_name):
		msg_depend(_('Do not know how to build "%s"') % target_sname)
		retval = 0
	    else:
		msg_depend(_('Target has no build commands and exists: "%s"') \
								% target_sname)

    #
    # If there is a dependency with build commands, execute it.
    #
    if dep:
	# the "finally" and "refresh" target may have multiple build commands
	deplist = target.get_build_dependencies()
	if not deplist:
	    deplist = [ dep ]
	for d in deplist:
	    # Execute the dependency commands if the target is outdated.
	    may_exec_depend(work, d, update, target, src_list_list)

    elif dirmode:
	# Target is a directory and has no specific build commands: create the
	# directory if it doesn't exist.
	if os.path.exists(target_name):
	    if not os.path.isdir(target_name):
		msg_warning(_('WARNING: Target is not a directory: "%s"')
								% target_sname)
	    else:
		msg_depend(_('Directory exists: "%s"') % target_sname)
	else:
	    try:
		import types

		# If a mode like "0777" was specified, use it.
		if isinstance(dirmode, types.StringType) and dirmode[0] == '0':
		    os.makedirs(target_name, oct2int(dirmode))
		else:
		    os.makedirs(target_name)
		msg_info(_('Created directory: "%s"') % target_name)
	    except EnvironmentError, e:
		recipe_error([], (_('Could not create directory "%s"')
						       % target_name) + str(e))

    elif target.name == "refresh":
	# Automatically refresh all nodes with an "refresh" attribute.
	retval = 1
	for node in work.nodes:
	    # Only need to refresh a node when:
	    # - it has an "refresh" attribute
	    # - the node doesn't exist yet
	    # - it does exist and the "constant" attribute isn't set
	    if node.attributes.has_key("refresh") and node.may_refresh():
		if not refresh_node([], work.globals, node, 0):
		    retval = 0


    if retval:
	target.status = Node.updated
    else:
	target.status = Node.new

    target.attributes["virtual"] = save_virtual	# restore "virtual" attribute

    return retval
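
# Summary of target_update(): update the sources of every dependency that
# names the target, then either execute a build dependency, fall back to
# matching rules (target_rule()), create the directory for a directory
# target, or refresh nodes for the special "refresh" target.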


def may_exec_depend(work, dep, update, target, src_list_list):
    """Execute the build commands of dependency "dep" if the target "target" is
       outdated.
       When updating succeeds, remember the signs for the items in
       "src_list_list"."""
    target_name = target.get_name()
    target_sname = target.short_name()

    # Get the timestamp for the target.  If this fails it probably doesn't
    # exist and needs to be rebuilt (dst_time will be zero).
    dst_time = url_time(work.globals, target_name)
    if dst_time > 0 and target.get_virtual():
	msg_warning(_('Warning: target is virtual but does exist: "%s"')
							     % target_sname)

    # If a "buildcheck" attribute is defined for the build commands, check
    # if it has changed.  Otherwise check if the build commands changed.
    if dep.build_attr.has_key("buildcheck"):
	buildcheck = dep.build_attr["buildcheck"]
    else:
	buildcheck = "$xcommands"
    buildcheck_update(buildcheck, dep, work, target, update)

    # If a target is older than any source or one of the targets in the
    # dependency doesn't exist: execute the commands.
    if update.forced:
	if update.source:
	    reason = _('"%s" has force attribute') % shorten_name(update.source)
	else:
	    reason = _('target "%s" has force attribute') % target_sname
    elif update.source:
	reason = _('"%s" has changed') % shorten_name(update.source)
    elif dst_time < update.time:
	reason = _('"%s" is newer') % update.time_source
    elif dst_time == 0 and not target.get_virtual():
	reason = _("it doesn't exist")
    elif dep.sourcelist == [] and target.get_virtual():
	reason = _('virtual target without dependencies')
    else:
	reason = ''
    if reason:
	msg_depend(_('Updating "%s" from "%s": %s') % (target_sname,
		       dictlist2str(shorten_dictlist(dep.sourcelist)), reason))

	# Create $BDIR if "target" starts with it.
	if remove_bdir(work.globals, target_name):
	    # Make sure the directory for target_name exists.
	    aap_checkdir(dep.rpstack, target_name)

	# Execute the build commands.
	exec_commands(dep, work.globals, target)

	# Check that the target was really updated.  Only when it exists.
	if dst_time > 0 and not target.get_virtual():
	    t = url_time(work.globals, target_name)
	    if t == 0:
		raise UserError, _('Building failed for "%s"') % target_sname
	    # if t == dst_time:
	    #   Could give an error here, but on fast machines it often happens
	    #   while nothing is wrong.

	# Update the signatures for all targets.  We assume they were all
	# updated, even though the build commands may skip some.
	for trg in dep.targetlist:
	    # Can there be a target without a node???
	    if not trg.has_key("_node"):
		continue
	    node = trg["_node"]
	    # Skip "refresh" and "finally", they are always rebuild
	    if node.name in Global.virtual_targets:
		continue

	    # Target changed, clear its cached signatures.
	    sign_clear(node.get_name())

	    # Remember the signatures of the sources now.  Do this for sources
	    # that the target depends on.  Any old signatures for this target
	    # can be removed, they are invalid anyway.
	    sign_clear_target(node)
	    for l in src_list_list:
		for s in l:
		    # For a dependency we know the node; for a rule we still
		    # need to find it.
		    if s.has_key("_node"):
			n = s["_node"]
		    else:
			n = work.find_node(s["name"])
		    # Call check_name() again, because creating the item may
		    # cause the filetype to change.
		    sign_updated(work.globals, n.get_name(),
							   check_name(s), node)

	    # Also remember the buildcheck signature.
	    if update.buildcheck:
		buildcheck_updated(node, update.buildcheck)

	    # The target has changed, need to redo the automatic dependencies.
	    node.auto_depend = None

    else:
	msg_depend(_('Target "%s" is up-to-date') % target_sname)


def build_autodepend(work, target, stype, source):
    """Find an autodepend rule to make a dependency recipe from an "stype" file.
       When a match is found execute the build commands for Node "target" from
       Node "source".
       Unlike using pattern rules, this does NOT update the source.
       Return non-zero for success (the target is up-to-date)."""
    # Search all defined autodepends for one where stype matches.
    autodepend = None
    for r in work.autodepends:
	for src_dict in r.sourcelist:
	    if src_dict["name"] == stype:
		autodepend = r
		break
	if autodepend:
	    break
    if not autodepend:
	return 0	# Didn't find a matching autodepend.

    msg_depend(_('Using autodepend "%s" for source "%s"')
		    % (dictlist2str(autodepend.sourcelist,
						  Expand(0, Expand.quote_aap)),
		       source.short_name()))

    # Use an Update object to remember whether any of the sources for this
    # target is outdated, in which case the target needs to be built.
    update = Update()

    # Check the signature of the source.  Use the attributes from the
    # autodepend plus those from the source Node.
    src_dict = src_dict.copy()
    src_dict["name"] = source.get_name()
    src_dict["_node"] = source
    for k in source.attributes.keys():
	src_dict[k] = source.attributes[k]
    check_need_update(work, update, src_dict, target)

    aap_checkdir(autodepend.rpstack, target.get_name())

    # If the autodepend used the {recursive} attribute, need to carry this over
    # to the source node.
    if autodepend.build_attr.has_key("recursive"):
	source.auto_depend_rec = autodepend.build_attr["recursive"]
    else:
	source.auto_depend_rec = 0

    # If the recipe exists and was generated recursively, the recipe depends on
    # all files mentioned in it.  This makes sure that the dependencies are
    # updated when "foo.c" includes "common.h" and "common.h" was changed.
    src_list = [ src_dict ]
    if source.auto_depend_rec and os.path.exists(target.get_name()):
	from Depend import read_auto_depend
	auto_dep = read_auto_depend(target, source.get_name())
	work.dictlist_nodes(auto_dep)
	for a in auto_dep:
	    check_need_update(work, update, a, target)
	src_list.extend(auto_dep)

    # Create a dependency to be executed below.
    from Dictlist import string2dictlist
    dep = Depend(string2dictlist(autodepend.rpstack, target.get_name()),
		    autodepend.build_attr,
		    string2dictlist(autodepend.rpstack, source.get_name()),
		    work, autodepend.rpstack,
		    autodepend.commands, autodepend.builddir)

    # Execute the build commands of the dependency when the target is outdated.
    may_exec_depend(work, dep, update, target, [ src_list ])

    return 1
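
# Usage sketch for build_autodepend() (hypothetical caller; the real call
# sites live outside this module):
#
#   ok = build_autodepend(work, recipe_node, "c", source_node)
#   # "recipe_node" is the generated dependency recipe for "source_node",
#   # "c" its filetype; a zero return means no matching autodepend exists.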


def default_targets(work):
    """Decide what the default target(s) is/are for this recipe."""
    if work.globals.has_key("TARGET") and work.globals["TARGET"]:
	# Use the targets from $TARGET.
	msg_depend(_('Building target(s) specified with $TARGET'))

	def dictlist2node(x):
	    t = work.get_node(x["name"], 0, x)
	    return t

	targets = map(dictlist2node, var2dictlist(work.globals, "TARGET"))

    elif work.find_node("all", ""):
	# Use the "all" target.
	msg_depend(_('Building the "all" target'))
	targets = [ work.find_node("all", "") ]

    elif work.dependencies:
	# Use the first target encountered as a dependency.
	msg_depend(_('Building target(s) for the first encountered dependency'))
	dl = work.dependencies[0].targetlist[0]
	targets = [ dl["_node"] ]
    else:
	raise UserError, _("No target specified")

    return targets
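
# Selection order in default_targets(): $TARGET if set, otherwise the "all"
# target if the recipe defines one, otherwise the target of the first
# dependency; it is an error when none of these apply.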


def dobuild(work):
    """
    Build the specified targets.
    """

    #
    # Execute all the "-c command" arguments.
    #
    if Global.cmd_args.options.has_key("command"):
	cmdlist = Global.cmd_args.options["command"]
    else:
	cmdlist = []
    for cmd in cmdlist:
	# Create a ParsePos object to contain the parse position in the string.
	# Make a new rpstack to be able to give useful error messages.
	fp = ParsePos([ RecPos(_('Command line argument "%s"') % cmd, 0) ],
							   string = cmd + '\n')
	# Parse and execute the commands.
	Process(fp, work.globals)

    # if there is a "-c command" argument and no targets we are done.
    if cmdlist and not Global.cmd_args.targets:
	return


    #
    # Build the specified or default targets.
    #
    if Global.cmd_args.targets:
	# Targets specified as an argument.
	msg_depend(_('Building targets specified on command line'))
	update_target = work.find_node("update", "")

	targets = []
	for t in Global.cmd_args.targets:
	    if t == "update" and (not update_target
					    or not update_target.dependencies):
		# An "update" target for which there is no explicit dependency
		# is turned into "refresh" and then build the default
		# target(s).
		msg_depend(_('"update" builds "refresh" and the default target'))
		targets.append(work.get_node("refresh", ""))
		targets.extend(default_targets(work))
	    else:
		# Find an existing Node or create a new one.
		targets.append(work.get_node(t))
    
    else:
	targets = default_targets(work)

    # Update the toplevel targets.
    for target in targets:
	if not target_update(work, target, 1):
	    raise UserError, (_('Do not know how to build "%s"')
							 % target.short_name())

    # Update the "finally" target if it exists.
    target = work.find_node("finally", "")
    if target:
	msg_depend(_('Building the "finally" target'))
	target_update(work, target)
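
# Command line sketch of what dobuild() handles ("-c" as referred to in the
# comments above; ":print" is only an example command):
#
#   aap -c ':print hello' all install
#
# This first executes the ":print" command, then updates the "all" and
# "install" targets; with only "-c" arguments and no targets dobuild()
# returns right after running the commands.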


# vim: set sw=4 sts=4 tw=79 fo+=l:
