Added tundra executables

This commit is contained in:
Daniel Collin 2014-09-14 08:36:58 +02:00
parent 69e453b658
commit fe82123a28
146 changed files with 19104 additions and 0 deletions

View File

@ -0,0 +1,41 @@
--
-- strict.lua
-- checks uses of undeclared global variables
-- All global variables must be 'declared' through a regular assignment
-- (even assigning nil will do) in a main chunk before being used
-- anywhere or assigned to inside a function.
--
-- Localize hot functions; the metamethods below run on every global access.
local getinfo, error, rawset, rawget = debug.getinfo, error, rawset, rawget
-- Reuse an existing metatable on _G if one is installed, else create one.
local mt = getmetatable(_G)
if mt == nil then
mt = {}
setmetatable(_G, mt)
end
-- Set of global names considered "declared" (assigned from a main chunk).
mt.__declared = {}
-- Classify the code that triggered the metamethod. Level 3 is the caller
-- of the __index/__newindex function; returns its 'what' field ("main",
-- "Lua" or "C"), defaulting to "C" when no stack info is available.
local function what ()
local d = getinfo(3, "S")
return d and d.what or "C"
end
-- Assigning a global: permitted from a main chunk or C; assigning an
-- undeclared name from inside a Lua function raises (level 2 blames the
-- assignment site). First legal assignment marks the name declared.
mt.__newindex = function (t, n, v)
if not mt.__declared[n] then
local w = what()
if w ~= "main" and w ~= "C" then
error("assign to undeclared variable '"..n.."'", 2)
end
mt.__declared[n] = true
end
rawset(t, n, v)
end
-- Reading a global: undeclared names may only be read from C code.
mt.__index = function (t, n)
if not mt.__declared[n] and what() ~= "C" then
error("variable '"..n.."' is not declared", 2)
end
return rawget(t, n)
end

View File

@ -0,0 +1,43 @@
require "strict"
local boot = require "tundra.boot"
-- Dispatch table mapping command-line action names to handler functions.
-- Handlers validate their arguments and delegate to tundra.boot (or pull
-- in the self-test module for its load-time side effects).
local actions = {}

actions['generate-dag'] = function (build_script)
  assert(build_script, "need a build script name")
  boot.generate_dag_data(build_script)
end

actions['generate-ide-files'] = function (build_script, ide_script)
  assert(build_script, "need a build script name")
  assert(ide_script, "need a generator name")
  boot.generate_ide_files(build_script, ide_script)
end

actions['selftest'] = function ()
  require "tundra.selftest"
end
-- Entry point invoked by the host executable. action_name selects a
-- handler from the `actions` table above; remaining arguments reach the
-- handler via the global `arg` array.
-- NOTE(review): this reads the global `arg` table rather than the `...`
-- vararg -- confirm the embedding host populates _G.arg with the
-- command-line arguments.
local function main(action_name, ...)
assert(action_name, "need an action")
local action = actions[action_name]
assert(action, "unknown action '" .. action_name .. "'")
-- check if debugger was requested
for i, v in ipairs(arg) do
if v == "--lua-debugger" then
-- Strip the flag so handlers never see it, then enter the debugger.
-- NOTE(review): pause() is presumably a global installed by
-- tundra.debugger -- confirm.
table.remove(arg, i)
require "tundra.debugger"
pause()
break
end
end
action(unpack(arg))
end
-- Module table consumed by the embedding executable.
return {
main = main
}

View File

@ -0,0 +1,142 @@
module(..., package.seeall)
-- Use "strict" when developing to flag accesses to nil global variables
-- This has very low perf impact (<0.1%), so always leave it on.
require "strict"
local os = require "os"
local platform = require "tundra.platform"
local util = require "tundra.util"
local depgraph = require "tundra.depgraph"
local unitgen = require "tundra.unitgen"
local buildfile = require "tundra.buildfile"
local native = require "tundra.native"
-- This trio is so useful we want them everywhere without imports.
-- Print a printf-style formatted line to stdout. Installed globally so
-- build scripts can use it without imports.
function _G.printf(msg, ...)
  print(string.format(msg, ...))
end
-- Raise a Lua error carrying a printf-style formatted message. Installed
-- globally so build scripts can use it without imports.
function _G.errorf(msg, ...)
  error(string.format(msg, ...))
end
-- Write a printf-style formatted message to stderr, then terminate the
-- process with a failing exit code. Installed globally.
function _G.croak(msg, ...)
  io.stderr:write(string.format(msg, ...), "\n")
  os.exit(1)
end
-- Expose benchmarking function for those times everything sucks
--
-- Wrap a function so that it prints execution times.
--
-- Usage:
-- foo = bench("foo", foo) -- benchmark function foo
-- Wrap fn so every call reports its duration (measured with the native
-- timer) under `name`, then passes all of fn's return values through.
-- Usage: foo = bench("foo", foo)
function _G.bench(name, fn)
  return function (...)
    local started = native.get_timer()
    local results = { fn(...) }
    local stopped = native.get_timer()
    printf("%s: %ss", name, native.timerdiff(started, stopped))
    return unpack(results)
  end
end
local environment = require "tundra.environment"
local nodegen = require "tundra.nodegen"
local decl = require "tundra.decl"
local path = require "tundra.path"
local depgraph = require "tundra.depgraph"
local dagsave = require "tundra.dagsave"
-- Host path separator, selected from the host platform; used throughout
-- for joining path fragments.
_G.SEP = platform.host_platform() == "windows" and "\\" or "/"
-- Global option flags consulted by generators; FullPaths requests
-- absolute paths on generated command lines (see generate_ide_files).
_G.Options = {
FullPaths = 1
}
-- Construct the base environment shared by all configs: seeds OBJECTROOT
-- and SEP, then lets the host-platform module (tundra.host.<platform>)
-- apply host-specific settings.
-- build_data: the args table the user's build script passed to Build().
-- add_unfiltered_vars: when true, user Env/ReplaceEnv entries without
-- config filters are merged immediately; config environments pass false
-- and merge later (see comment below). Returns the new environment.
local function make_default_env(build_data, add_unfiltered_vars)
local default_env = environment.create()
default_env:set_many {
["OBJECTROOT"] = "t2-output",
["SEP"] = SEP,
}
local host_platform = platform.host_platform()
do
local mod_name = "tundra.host." .. host_platform
local mod = require(mod_name)
mod.apply_host(default_env)
end
-- Add any unfiltered entries from the build data's Env and ReplaceEnv to the
-- default environment. For config environments, this will be false, because we
-- want to wait until the config's tools have run before adding any user
-- customizations.
if add_unfiltered_vars then
if build_data.Env then
nodegen.append_filtered_env_vars(default_env, build_data.Env, nil, true)
end
if build_data.ReplaceEnv then
nodegen.replace_filtered_env_vars(default_env, build_data.ReplaceEnv, nil, true)
end
end
return default_env
end
-- Load the given build script, build the dependency graph for every build
-- tuple it defines, and serialize the resulting DAG data to disk.
function generate_dag_data(build_script_fn)
  local build_data = buildfile.run(build_script_fn)
  -- Config environments receive user Env/ReplaceEnv later, after their
  -- tools have run; hence add_unfiltered_vars = false here.
  local env = make_default_env(build_data.BuildData, false)
  local _, node_bindings = unitgen.generate_dag(
      build_data.BuildTuples,
      build_data.BuildData,
      build_data.Passes,
      build_data.Configs,
      env)
  dagsave.save_dag_data(
      node_bindings,
      build_data.DefaultVariant,
      build_data.DefaultSubVariant,
      build_data.ContentDigestExtensions,
      build_data.Options)
end
-- Generate IDE project files from a build script.
-- build_script_fn: path to the user's tundra.lua.
-- ide_script: generator module name; names without a dot are resolved
-- under the builtin tundra.ide.* namespace.
function generate_ide_files(build_script_fn, ide_script)
-- We are generating IDE integration files. Load the specified
-- integration module rather than DAG builders.
--
-- Also, default to using full paths for all commands to aid with locating
-- sources better.
Options.FullPaths = 1
local build_data = buildfile.run(build_script_fn)
local build_tuples = assert(build_data.BuildTuples)
local raw_data = assert(build_data.BuildData)
local passes = assert(build_data.Passes) -- asserted for presence; consumed via build_data below
-- IDE generation wants user env customizations applied up front.
local env = make_default_env(raw_data, true)
if not ide_script:find('.', 1, true) then
ide_script = 'tundra.ide.' .. ide_script
end
require(ide_script)
-- Generate dag
local raw_nodes, node_bindings = unitgen.generate_dag(
build_data.BuildTuples,
build_data.BuildData,
build_data.Passes,
build_data.Configs,
env)
-- Pass the build tuples directly to the generator and let it write
-- files.
nodegen.generate_ide_files(build_tuples, build_data.DefaultNodes, raw_nodes, env, raw_data.IdeGenerationHints, ide_script)
end

View File

@ -0,0 +1,253 @@
module(..., package.seeall)
local util = require "tundra.util"
local native = require "tundra.native"
-- Build a variant record carrying the given name and no extra options.
local function mk_defvariant(name)
  return { Name = name, Options = {} }
end

-- Variants offered when the build script does not declare its own.
local default_variants = {
  mk_defvariant("debug"),
  mk_defvariant("production"),
  mk_defvariant("release"),
}

-- Likewise for subvariants.
local default_subvariants = { "default" }
-- Metatable tag identifying tables that went through the Config constructor.
local _config_class = {}

-- Table constructor used in the Configs array of tundra.lua. Validates
-- the config name (<platform>-<toolset> form) and the SubConfigs /
-- DefaultSubConfig pairing, then tags the table with _config_class.
function _G.Config(args)
  local name = args.Name
  if name == nil then
    error("no `Name' specified for configuration")
  end
  if not name:match("^[%w_]+-[%w_]+$") then
    errorf("configuration name %s doesn't follow <platform>-<toolset> pattern", name)
  end
  if args.SubConfigs and not args.DefaultSubConfig then
    errorf("configuration %s has `SubConfigs' but no `DefaultSubConfig'", name)
  end
  return setmetatable(args, _config_class)
end
-- Compute the cross product of buildable configs x variants x subvariants.
-- A config is buildable when its SupportedHosts (or, for 1.0 compat, its
-- DefaultOnHost) contains the running host platform; virtual configs are
-- skipped. Raises via errorf when nothing is buildable.
local function analyze_targets(configs, variants, subvariants)
local build_tuples = {}
local build_configs = {}
local build_variants = {}
local build_subvariants = {}
for _, cfg in pairs(configs) do
if not cfg.Virtual then -- skip virtual configs
if not cfg.SupportedHosts then
-- Tundra 1.0 compat: derive SupportedHosts from DefaultOnHost,
-- which may be a single host name or a list.
if cfg.DefaultOnHost then
if type(cfg.DefaultOnHost) == "table" then
cfg.SupportedHosts = cfg.DefaultOnHost
else
cfg.SupportedHosts = { cfg.DefaultOnHost }
end
else
printf("1.0-compat: config %s doesn't specify SupportedHosts -- will never be built", cfg.Name);
cfg.SupportedHosts = { }
end
end
local lut = util.make_lookup_table(cfg.SupportedHosts)
if lut[native.host_platform] then
build_configs[#build_configs + 1] = cfg
end
end
end
-- variants is keyed by name; subvariants is a name -> true set.
for _, var in pairs(variants) do build_variants[#build_variants + 1] = var end
for var, _ in pairs(subvariants) do build_subvariants[#build_subvariants + 1] = var end
for _, config in ipairs(build_configs) do
-- NOTE(review): virtual configs are already filtered out above, so
-- this branch looks unreachable from this function alone -- confirm.
if config.Virtual then
croak("can't build configuration %s directly; it is a support configuration only", config.Name)
end
for _, variant in ipairs(build_variants) do
for _, subvariant in ipairs(build_subvariants) do
build_tuples[#build_tuples + 1] = { Config = config, Variant = variant, SubVariant = subvariant }
end
end
end
if #build_tuples == 0 then
errorf("no build tuples available\n")
end
return build_tuples
end
-- Custom pcall error handler to scan for syntax errors (thrown as tables) and
-- report them without a backtrace, trying to get the filename and line number
-- right so the user can fix their build file.
-- err_obj: the raised value. Table errors carrying Class and Message are
-- treated as structured syntax errors and formatted without a backtrace;
-- anything else gets a normal debug.traceback.
function syntax_error_catcher(err_obj)
if type(err_obj) == "table" and err_obj.Class and err_obj.Message then
local i = 1
-- Walk down the stack until we find a function that isn't sourced from
-- a file. These have 'source' names that don't start with an @ sign.
-- Because we read all files into memory before executing them, this
-- will give us the source filename of the user script.
while true do
local info = debug.getinfo(i, 'Sl')
--print(util.tostring(info))
if not info then
break
end
if info.what == "C" or (info.source:sub(1, 1) == "@" and info.source ~= "@units.lua") then
i = i + 1
else
-- Strip the leading '@' so the name reads as a plain file path.
local fn = info.source
if info.source:sub(1, 1) == "@" then
fn = info.source:sub(2)
end
if info.currentline == -1 then
return string.format("%s: %s", err_obj.Class, err_obj.Message)
else
return string.format("%s(%d): %s: %s", fn, info.currentline, err_obj.Class, err_obj.Message)
end
end
end
-- Fell off the stack without finding a user frame; report without location.
return string.format("%s: %s", err_obj.Class, err_obj.Message)
else
return debug.traceback(err_obj, 2)
end
end
-- A place to store the result of the user's build script calling Build()
local build_result = nil
-- The Build function is the main entry point for "tundra.lua" when invoked.
-- It validates/normalizes the args table (configs, variants, subvariants,
-- passes), computes the buildable tuples, and stashes everything in
-- build_result for run() to collect.
function _G.Build(args)
if type(args.Configs) ~= "table" or #args.Configs == 0 then
croak("Need at least one config; got %s", util.tostring(args.Configs or "none at all"))
end
local configs, variants, subvariants = {}, {}, {}
-- Legacy support: run "Config" constructor automatically on naked tables
-- passed in Configs array.
for idx = 1, #args.Configs do
local cfg = args.Configs[idx]
if getmetatable(cfg) ~= _config_class then
cfg = Config(cfg)
args.Configs[idx] = cfg
end
configs[cfg.Name] = cfg
end
for _, dir in util.nil_ipairs(args.ScriptDirs) do
-- Make sure dir is sane and ends with a slash
-- NOTE(review): the gsub below actually strips a trailing slash; the
-- "?.lua" search expression adds its own separator.
dir = dir:gsub("[/\\]", SEP):gsub("[/\\]$", "")
local expr = dir .. SEP .. "?.lua"
-- Add user toolset dir first so they can override builtin scripts.
package.path = expr .. ";" .. package.path
end
-- Normalize variants: bare strings become full variant records.
if args.Variants then
for i, x in ipairs(args.Variants) do
if type(x) == "string" then
args.Variants[i] = mk_defvariant(x)
else
assert(x.Name)
if not x.Options then
x.Options = {}
end
end
end
end
local variant_array = args.Variants or default_variants
for _, variant in ipairs(variant_array) do variants[variant.Name] = variant end
local subvariant_array = args.SubVariants or default_subvariants
for _, subvariant in ipairs(subvariant_array) do subvariants[subvariant] = true end
-- Default variant is the first listed unless DefaultVariant names another.
local default_variant = variant_array[1]
if args.DefaultVariant then
for _, x in ipairs(variant_array) do
if x.Name == args.DefaultVariant then
default_variant = x
end
end
end
local default_subvariant = args.DefaultSubVariant or subvariant_array[1]
local build_tuples = analyze_targets(configs, variants, subvariants)
local passes = args.Passes or { Default = { Name = "Default", BuildOrder = 1 } }
printf("%d valid build tuples", #build_tuples)
-- Validate pass data
for id, data in pairs(passes) do
if not data.Name then
croak("Pass %s has no Name attribute", id)
elseif not data.BuildOrder then
croak("Pass %s has no BuildOrder attribute", id)
end
end
-- Assume syntax for C and DotNet is always needed
-- for now. Could possible make an option for which generator sets to load
-- in the future.
require "tundra.syntax.native"
require "tundra.syntax.dotnet"
-- Result travels via module-local state rather than a return value,
-- because Build() is called from inside the user's script (see run()).
build_result = {
BuildTuples = build_tuples,
BuildData = args,
Passes = passes,
Configs = configs,
DefaultVariant = default_variant,
DefaultSubVariant = default_subvariant,
ContentDigestExtensions = args.ContentDigestExtensions,
Options = args.Options,
}
end
-- Load and execute the user's build script in a sandboxed environment
-- whose reads fall back to _G (writes stay local), then return whatever
-- Build() stored in the module-local build_result. Croaks on I/O, parse
-- or runtime failure.
function run(build_script_fn)
local f, err = io.open(build_script_fn, 'r')
if not f then
croak("%s", err)
end
local text = f:read("*all")
f:close()
-- Fresh per-script environment with read-through to the real globals.
local script_globals, script_globals_mt = {}, {}
script_globals_mt.__index = _G
setmetatable(script_globals, script_globals_mt)
local chunk, error_msg = loadstring(text, build_script_fn)
if not chunk then
croak("%s", error_msg)
end
setfenv(chunk, script_globals)
-- syntax_error_catcher formats structured syntax errors without a trace.
local success, result = xpcall(chunk, syntax_error_catcher)
if not success then
print("Build script execution failed")
croak("%s", result or "")
end
-- Collect the Build() side effect and reset it for the next run.
local result = build_result
build_result = nil
return result
end

View File

@ -0,0 +1,409 @@
module(..., package.seeall)
local depgraph = require "tundra.depgraph"
local util = require "tundra.util"
local scanner = require "tundra.scanner"
local dirwalk = require "tundra.dirwalk"
local platform = require "tundra.platform"
local native = require "tundra.native"
local njson = require "tundra.native.json"
local path = require "tundra.path"
local dag_dag_magic = 0x15890105
-- Collect the distinct pass tables referenced by the nodes, sorted by
-- BuildOrder. Returns the sorted pass array plus a map from pass table to
-- its zero-based index (the serialized DAG is 0-indexed).
-- Raises when a pass is not a table or lacks a numeric BuildOrder.
local function get_passes(nodes)
  local result = {}
  local seen_passes = {}
  for _, node in ipairs(nodes) do
    local p = node.pass
    if not seen_passes[p] then
      -- Validate lazily: the original built the util.tostring diagnostic
      -- eagerly for every new pass even when the check succeeded.
      if type(p) ~= "table" then
        error("Passes must be tables, have " .. util.tostring(p))
      end
      if type(p.BuildOrder) ~= "number" then
        error("Pass BuildOrder must be a number")
      end
      result[#result + 1] = p
      seen_passes[p] = true
    end
  end
  table.sort(result, function (a, b) return a.BuildOrder < b.BuildOrder end)
  local pass_lookup = {}
  for index, pass in ipairs(result) do
    pass_lookup[pass] = index - 1
  end
  return result, pass_lookup
end
-- Wire up implicit dependencies between nodes based on files: if node A
-- writes a file node B reads, B gains a dependency on A. Errors out when
-- two nodes claim the same output file. Paths are made absolute and, on
-- case-insensitive platforms (windows, macosx), lower-cased before compare.
local function setup_input_deps(nodes)
local producers = {}
local cwd = native.getcwd() .. SEP
local filter
if native.host_platform == 'windows' or native.host_platform == 'macosx' then
filter = function (str) return str:lower() end
else
filter = function (str) return str end
end
-- Per-node lookup of existing deps, to avoid adding duplicate edges.
local node_deps = {}
-- Record producing node for all output files
for _, n in ipairs(nodes) do
for _, output in util.nil_ipairs(n.outputs) do
if not path.is_absolute(output) then
output = cwd .. output
end
output = filter(output)
if producers[output] then
errorf("file %s set to be written by more than one target:\n%s\n%s\n",
output, n.annotation, producers[output].annotation)
end
producers[output] = n
end
if n.deps then
node_deps[n] = util.make_lookup_table(n.deps)
end
end
-- Map input files to dependencies
for _, n in ipairs(nodes) do
for _, inputf in util.nil_ipairs(n.inputs) do
if not path.is_absolute(inputf) then
inputf = cwd .. inputf
end
inputf = filter(inputf)
local producer = producers[inputf]
local deps_lut = node_deps[n]
if producer and (not deps_lut or not deps_lut[producer]) then
-- NOTE(review): assumes n.deps is always a table; a node with
-- inputs but nil deps would error here -- confirm make_node
-- always supplies deps.
n.deps[#n.deps + 1] = producer
if not deps_lut then
deps_lut = {}
node_deps[n] = deps_lut
end
deps_lut[producer] = true
end
end
end
end
-- Gather the unique scanner objects referenced by the nodes. Returns the
-- scanner array plus a map from scanner to its zero-based array position.
local function get_scanners(nodes)
  local scanners = {}
  local scanner_to_index = {}
  for _, node in ipairs(nodes) do
    local s = node.scanner
    if s and scanner_to_index[s] == nil then
      scanner_to_index[s] = #scanners
      scanners[#scanners + 1] = s
    end
  end
  return scanners, scanner_to_index
end
-- Emit the pass names as the "Passes" JSON array.
local function save_passes(w, passes)
  w:begin_array("Passes")
  for i = 1, #passes do
    w:write_string(passes[i].Name)
  end
  w:end_array()
end
-- Serialize the scanner list as the "Scanners" JSON array. Each scanner
-- emits Kind and IncludePaths; 'generic' scanners additionally emit their
-- whitespace/keyword configuration.
local function save_scanners(w, scanners)
w:begin_array("Scanners")
for _, s in ipairs(scanners) do
w:begin_object()
w:write_string(s.Kind, 'Kind')
w:begin_array("IncludePaths")
for _, path in util.nil_ipairs(s.Paths) do
w:write_string(path)
end
w:end_array()
-- Serialize specialized state for generic scanners
if s.Kind == 'generic' then
w:write_bool(s.RequireWhitespace, 'RequireWhitespace')
w:write_bool(s.UseSeparators, 'UseSeparators')
w:write_bool(s.BareMeansSystem, 'BareMeansSystem')
w:begin_array('Keywords')
for _, kw in util.nil_ipairs(s.Keywords) do
w:write_string(kw)
end
w:end_array()
w:begin_array('KeywordsNoFollow')
for _, kw in util.nil_ipairs(s.KeywordsNoFollow) do
w:write_string(kw)
end
w:end_array()
end
w:end_object()
end
w:end_array()
end
-- Serialize all DAG nodes as the "Nodes" JSON array. Nodes reference
-- passes, scanners and dependency nodes by zero-based index, so each
-- node.index must already equal its array position (asserted below).
local function save_nodes(w, nodes, pass_to_index, scanner_to_index)
w:begin_array("Nodes")
for idx, node in ipairs(nodes) do
w:begin_object()
assert(idx - 1 == node.index)
if node.action then
w:write_string(node.action, "Action")
end
if node.preaction then
w:write_string(node.preaction, "PreAction")
end
w:write_string(node.annotation, "Annotation")
w:write_number(pass_to_index[node.pass], "PassIndex")
if #node.deps > 0 then
w:begin_array("Deps")
for _, dep in ipairs(node.deps) do
w:write_number(dep.index)
end
w:end_array()
end
-- Emit a named string array, skipping nil or empty lists entirely.
local function dump_file_list(list, name)
if list and #list > 0 then
w:begin_array(name)
for _, fn in ipairs(list) do
w:write_string(fn)
end
w:end_array(name) -- NOTE(review): end_array is called without args elsewhere; argument presumably ignored -- confirm writer API
end
end
dump_file_list(node.inputs, "Inputs")
dump_file_list(node.outputs, "Outputs")
dump_file_list(node.aux_outputs, "AuxOutputs")
-- Save environment strings
local env_count = 0
for k, v in util.nil_pairs(node.env) do
env_count = env_count + 1
end
if env_count > 0 then
w:begin_array("Env")
for k, v in pairs(node.env) do
w:begin_object()
w:write_string(k, "Key")
w:write_string(v, "Value")
w:end_object()
end
w:end_array()
end
if node.scanner then
w:write_number(scanner_to_index[node.scanner], "ScannerIndex")
end
-- Boolean flags are written only when set, keeping the file compact.
if node.overwrite_outputs then
w:write_bool(true, "OverwriteOutputs")
end
if node.is_precious then
w:write_bool(true, "PreciousOutputs")
end
if node.expensive then
w:write_bool(true, "Expensive")
end
w:end_object()
end
w:end_array()
end
-- Serialize the "Setup" object: interned config/variant/subvariant name
-- arrays, one BuildTuples entry per binding (with always/default/named
-- node index arrays), and the DefaultBuildTuple where -1 marks "none".
-- The default config is the last binding whose DefaultOnHost equals the
-- running host platform.
local function save_configs(w, bindings, default_variant, default_subvariant)
local configs = {}
local variants = {}
local subvariants = {}
local config_index = {}
local variant_index = {}
local subvariant_index = {}
local default_config = nil
local host_platform = platform.host_platform()
-- Intern names: the maps assign zero-based ids, the *_index arrays keep
-- first-seen order for serialization.
for _, b in ipairs(bindings) do
if not configs[b.Config.Name] then
configs[b.Config.Name] = #config_index
config_index[#config_index+1] = b.Config.Name
end
if not variants[b.Variant.Name] then
variants[b.Variant.Name] = #variant_index
variant_index[#variant_index+1] = b.Variant.Name
end
if not subvariants[b.SubVariant] then
subvariants[b.SubVariant] = #subvariant_index
subvariant_index[#subvariant_index+1] = b.SubVariant
end
if b.Config.DefaultOnHost == host_platform then
default_config = b.Config
end
end
assert(#config_index > 0)
assert(#variant_index > 0)
assert(#subvariant_index > 0)
-- Emit a named string array, skipped entirely when nil or empty.
local function dump_str_array(array, name)
if array and #array > 0 then
w:begin_array(name)
for _, name in ipairs(array) do
w:write_string(name)
end
w:end_array()
end
end
w:begin_object("Setup")
dump_str_array(config_index, "Configs")
dump_str_array(variant_index, "Variants")
dump_str_array(subvariant_index, "SubVariants")
w:begin_array("BuildTuples")
for index, binding in ipairs(bindings) do
w:begin_object()
w:write_number(configs[binding.Config.Name], "ConfigIndex")
w:write_number(variants[binding.Variant.Name], "VariantIndex")
w:write_number(subvariants[binding.SubVariant], "SubVariantIndex")
-- Node lists are stored as arrays of node indices.
local function store_node_index_array(nodes, name)
w:begin_array(name)
for _, node in util.nil_ipairs(nodes) do
w:write_number(node.index)
end
w:end_array()
end
store_node_index_array(binding.AlwaysNodes, "AlwaysNodes")
store_node_index_array(binding.DefaultNodes, "DefaultNodes")
w:begin_object("NamedNodes")
for name, node in pairs(binding.NamedNodes) do
w:write_number(node.index, name)
end
w:end_object()
w:end_object()
end
w:end_array()
-- m_DefaultBuildTuple
w:begin_object("DefaultBuildTuple")
if default_config then
w:write_number(configs[default_config.Name], "ConfigIndex")
else
w:write_number(-1, "ConfigIndex")
end
if default_variant then
w:write_number(variants[default_variant.Name], "VariantIndex")
else
w:write_number(-1, "VariantIndex")
end
if default_subvariant then
w:write_number(subvariants[default_subvariant], "SubVariantIndex")
else
w:write_number(-1, "SubVariantIndex")
end
w:end_object()
w:end_object()
end
-- Write the signature sections of the DAG file: a timestamp for every Lua
-- file accessed while generating the DAG ("FileSignatures") and the
-- results of every directory glob performed ("GlobSignatures"). The build
-- executable uses these to decide when the DAG must be regenerated.
local function save_signatures(w, accessed_lua_files)
  w:begin_array("FileSignatures")
  for _, fn in ipairs(accessed_lua_files) do
    w:begin_object()
    local stat = native.stat_file(fn)
    if not stat.exists then
      -- BUGFIX: the original interpolated an undefined variable `err`
      -- ("is gone: %s" with no matching argument).
      errorf("accessed file %s is gone", fn)
    end
    w:write_string(fn, "File")
    w:write_number(stat.timestamp, "Timestamp")
    w:end_object()
  end
  w:end_array()
  w:begin_array("GlobSignatures")
  local globs = dirwalk.all_queries()
  for _, glob in ipairs(globs) do
    w:begin_object()
    w:write_string(glob.Path, "Path")
    w:begin_array("Files")
    for _, fn in ipairs(glob.Files) do w:write_string(fn) end
    w:end_array()
    w:begin_array("SubDirs")
    for _, fn in ipairs(glob.SubDirs) do w:write_string(fn) end
    w:end_array()
    w:end_object()
  end
  w:end_array()
end
-- Sanity check: a node may only depend on nodes in the same or an earlier
-- pass; errors out (via errorf) on a forward-pass dependency.
local function check_deps(nodes)
  for _, node in ipairs(nodes) do
    local order = node.pass.BuildOrder
    for _, dep in ipairs(node.deps) do
      if dep.pass.BuildOrder > order then
        errorf("%s (pass: %s) depends on %s in later pass (%s)",
          node.annotation, node.pass.Name, dep.annotation, dep.pass.Name)
      end
    end
  end
end
-- Top-level DAG serialization entry point: assigns node indices, wires up
-- file-based inter-node dependencies, validates pass ordering, then
-- writes the entire graph to '.tundra2.dag.json'.
-- misc_options is optional; MaxExpensiveJobs defaults to -1 (no limit).
function save_dag_data(bindings, default_variant, default_subvariant, content_digest_exts, misc_options)
-- Call builtin function to get at accessed file table
-- NOTE(review): get_accessed_files is not defined in this module;
-- presumably injected as a global by the host executable -- confirm.
local accessed_lua_files = util.table_keys(get_accessed_files())
misc_options = misc_options or {}
local max_expensive_jobs = misc_options.MaxExpensiveJobs or -1
printf("save_dag_data: %d bindings, %d accessed files", #bindings, #accessed_lua_files)
local nodes = depgraph.get_all_nodes()
-- Set node indices (zero-based, matching serialization order)
for idx, node in ipairs(nodes) do
node.index = idx - 1
end
-- Set up array of passes
local passes, pass_to_index = get_passes(nodes)
-- Hook up dependencies due to input files
setup_input_deps(nodes)
check_deps(nodes)
-- Find scanners
local scanners, scanner_to_index = get_scanners(nodes)
local w = njson.new('.tundra2.dag.json')
w:begin_object()
save_configs(w, bindings, default_variant, default_subvariant)
save_passes(w, passes)
save_scanners(w, scanners)
save_nodes(w, nodes, pass_to_index, scanner_to_index)
save_signatures(w, accessed_lua_files)
if content_digest_exts and #content_digest_exts > 0 then
w:begin_array("ContentDigestExtensions")
for _, ext in ipairs(content_digest_exts) do
w:write_string(ext)
end
w:end_array()
end
w:write_number(max_expensive_jobs, "MaxExpensiveCount")
w:end_object()
w:close()
end

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,91 @@
module(..., package.seeall)
local nodegen = require "tundra.nodegen"
local functions = {}
local _decl_meta = {}
_decl_meta.__index = _decl_meta
local current = nil
-- Build a parser whose sandbox environment routes name lookups: nodegen
-- evaluator names produce result-collecting wrappers, registered syntax
-- functions come next, and everything else falls through to _G. Unit
-- scripts execute inside this environment (see parse_rec). Also sets the
-- module-local `current` so add_function can patch a live parser.
local function new_parser()
local obj = {
Functions = {},
Results = {},
DefaultTargets = {},
AlwaysTargets = {},
}
local outer_env = _G
local iseval = nodegen.is_evaluator
local function indexfunc(tab, var)
if iseval(var) then
-- Return an anonymous function such that
-- the code "Foo { ... }" will result in a call to
-- "nodegen.evaluate('Foo', { ... })"
return function (data)
local result = nodegen.evaluate(var, data)
obj.Results[#obj.Results + 1] = result
return result
end
end
local p = obj.Functions[var]
if p then return p end
return outer_env[var]
end
-- Writes to the sandbox are forbidden outright (__newindex = error).
obj.FunctionMeta = { __index = indexfunc, __newindex = error }
obj.FunctionEnv = setmetatable({}, obj.FunctionMeta)
-- Bind every registered syntax function to this parser's environment.
for name, fn in pairs(functions) do
obj.Functions[name] = setfenv(fn, obj.FunctionEnv)
end
-- Default/Always collect targets declared by the unit scripts.
obj.Functions["Default"] = function(default_obj)
obj.DefaultTargets[#obj.DefaultTargets + 1] = default_obj
end
obj.Functions["Always"] = function(always_obj)
obj.AlwaysTargets[#obj.AlwaysTargets + 1] = always_obj
end
current = setmetatable(obj, _decl_meta)
return current
end
-- Register a unit-syntax function under the given name. If a parse is in
-- progress, also patch it into the live parser's environment (this
-- happens when a unit script require()s a syntax module mid-parse).
function add_function(name, fn)
  assert(name and fn)
  functions[name] = fn
  if current ~= nil then
    -- require called from within unit script
    current.Functions[name] = setfenv(fn, current.FunctionEnv)
  end
end
-- Recursively evaluate unit-script input: tables are walked element-wise,
-- functions run directly, strings are loaded as file names; anything else
-- croaks. Whatever runs does so inside this parser's sandbox environment.
function _decl_meta:parse_rec(data)
local chunk
if type(data) == "table" then
for _, gen in ipairs(data) do
self:parse_rec(gen)
end
return
elseif type(data) == "function" then
chunk = data
elseif type(data) == "string" then
chunk = assert(loadfile(data))
else
croak("unknown type %s for unit_generator %q", type(data), tostring(data))
end
setfenv(chunk, self.FunctionEnv)
chunk()
end
-- Parse one or more unit scripts (table/function/filename, see parse_rec)
-- and return the accumulated results plus the Default and Always target
-- lists collected during parsing.
function parse(data)
  -- BUGFIX: `p` was assigned without `local`, leaking the parser into the
  -- module environment.
  local p = new_parser()
  current = p
  p:parse_rec(data)
  current = nil
  return p.Results, p.DefaultTargets, p.AlwaysTargets
end

View File

@ -0,0 +1,155 @@
module(..., package.seeall)
local boot = require "tundra.boot"
local util = require "tundra.util"
local path = require "tundra.path"
local native = require "tundra.native"
local environment = require "tundra.environment"
local default_pass = { Name = "Default", BuildOrder = 100000 }
local all_nodes = {}
local _node_mt = {}
_node_mt.__index = _node_mt
-- Construct a DAG node from a declarative data table and register it in
-- the module-wide node list. Required: data_.Env (environment object).
-- Optional: InputFiles, ImplicitInputs, InputFilesUntracked, OutputFiles,
-- AuxOutputFiles, Dependencies, Pass, Scanner, Action, PreAction, Label,
-- Precious, Expensive, OverwriteOutputs (defaults true).
function make_node(data_)
local env_ = data_.Env
assert(environment.is_environment(env_), "Env must be provided")
local root_path = native.getcwd() .. env_:get('SEP')
-- Absolute-ify a path, quoting it if it contains spaces, for use on
-- command lines.
local function path_for_cmdline(p)
local full_path
if path.is_absolute(p) then
full_path = p
else
full_path = root_path .. p
end
if full_path:find(' ', 1, true) then
return '"' .. full_path .. '"'
else
return full_path
end
end
-- Interpolate env vars and normalize each path; non-strings pass through.
local function normalize_paths(paths)
return util.mapnil(paths, function (x)
if type(x) == "string" then
local v = env_:interpolate(x)
v = path.normalize(v)
return v
else
return x
end
end)
end
-- these are the inputs that $(<) expand to
local regular_inputs = normalize_paths(data_.InputFiles)
-- these are other, auxillary input files that shouldn't appear on the command line
-- useful to e.g. add an input dependency on a tool
local implicit_inputs = normalize_paths(data_.ImplicitInputs)
local inputs = util.merge_arrays_2(regular_inputs, implicit_inputs)
local outputs = normalize_paths(data_.OutputFiles)
-- Sorted copies give the serialized DAG a stable ordering.
local inputs_sorted = inputs and util.clone_array(inputs) or {}
local outputs_sorted = outputs and util.clone_array(outputs) or {}
local cmdline_inputs = util.merge_arrays(regular_inputs, data_.InputFilesUntracked)
table.sort(inputs_sorted)
table.sort(outputs_sorted)
-- Quote the paths before interpolation into the command line
local expand_env = {
['<'] = util.mapnil(cmdline_inputs, path_for_cmdline),
['@'] = util.mapnil(outputs, path_for_cmdline),
}
-- Unquoted variant used for the human-readable annotation only.
local expand_env_pretty = {
['<'] = cmdline_inputs,
['@'] = outputs,
}
-- Outputs are overwritten by default; explicit false disables it.
local overwrite = true
if type(data_.OverwriteOutputs) ~= "nil" then
overwrite = data_.OverwriteOutputs
end
if data_.Scanner and not data_.Scanner.Kind then
errorf("Missing scanner kind")
end
-- make sure dependencies are unique
local unique_deps = util.uniq(data_.Dependencies or {})
local params = {
pass = data_.Pass or default_pass,
scanner = data_.Scanner,
deps = unique_deps,
inputs = inputs_sorted,
outputs = outputs_sorted,
is_precious = data_.Precious,
expensive = data_.Expensive,
overwrite_outputs = overwrite,
src_env = env_,
env = env_.external_vars,
aux_outputs = util.mapnil(data_.AuxOutputFiles, function (x)
local result = env_:interpolate(x, expand_env)
return path.normalize(result)
end),
}
if data_.Action then
params.action = env_:interpolate(data_.Action, expand_env)
else
assert(0 == #params.outputs, "can't have output files without an action")
params.action = ""
end
if data_.PreAction then
params.preaction = env_:interpolate(data_.PreAction, expand_env)
end
params.annotation = env_:interpolate(data_.Label or "?", expand_env_pretty)
local result = setmetatable(params, _node_mt)
-- Stash node
all_nodes[#all_nodes + 1] = result
return result
end
-- True when obj is a node produced by make_node (identified by its
-- metatable).
function is_node(obj)
  return _node_mt == getmetatable(obj)
end
-- Append this node's output files to tab. When exts (a list of file
-- extensions) is given, only outputs whose extension is listed are added.
function _node_mt:insert_output_files(tab, exts)
  if not exts then
    for _, fn in ipairs(self.outputs) do
      tab[#tab + 1] = fn
    end
    return
  end
  local wanted = util.make_lookup_table(exts)
  for _, fn in ipairs(self.outputs) do
    if wanted[path.get_extension(fn)] then
      tab[#tab + 1] = fn
    end
  end
end
-- Append this node's dependency nodes (if any) to tab.
function _node_mt:insert_deps(tab)
  local count = #tab
  for _, dep in util.nil_ipairs(self.deps) do
    count = count + 1
    tab[count] = dep
  end
end
-- Accessor for the module-wide list of all nodes created by make_node,
-- in creation order.
function get_all_nodes()
return all_nodes
end

View File

@ -0,0 +1,41 @@
module(..., package.seeall)
local native = require "tundra.native"
-- Stash of all dir walks performed for signature generation.
local query_records = {}
-- Stack-based directory walk rooted at path. Every directory listing is
-- recorded in query_records so the DAG can be invalidated when directory
-- contents change. filter_callback, when given, receives each
-- subdirectory name; a falsy return prunes descent into it.
-- Returns the full paths of all files found.
function walk(path, filter_callback)
  local dir_stack = { path }
  local paths_out = {}
  while #dir_stack > 0 do
    local dir = dir_stack[#dir_stack]
    table.remove(dir_stack)
    local subdirs, files = native.list_directory(dir)
    query_records[dir] = { Files = files, SubDirs = subdirs }
    for _, subdir in ipairs(subdirs) do
      -- BUGFIX: full_dir_path was assigned without `local`, leaking a
      -- global on every directory visited.
      local full_dir_path = dir .. SEP .. subdir
      if not filter_callback or filter_callback(subdir) then
        table.insert(dir_stack, full_dir_path)
      end
    end
    for _, file in ipairs(files) do
      table.insert(paths_out, dir .. SEP .. file)
    end
  end
  return paths_out
end
-- Snapshot all directory queries recorded by walk() as an array of
-- { Path, Files, SubDirs } records (iteration order unspecified).
function all_queries()
  local result = {}
  local n = 0
  for dir, rec in pairs(query_records) do
    n = n + 1
    result[n] = { Path = dir, Files = rec.Files, SubDirs = rec.SubDirs }
  end
  return result
end

View File

@ -0,0 +1,316 @@
module(..., package.seeall)
local util = require 'tundra.util'
local path = require 'tundra.path'
local depgraph = require 'tundra.depgraph'
local nenv = require 'tundra.environment.native'
local os = require 'os'
local global_setup = {}
--[==[
The environment is a holder for variables and their associated values. Values
are always kept as tables, even if there is only a single value.
FOO = { a b c }
e:interpolate("$(FOO)") -> "a b c"
e:interpolate("$(FOO:j, )") -> "a, b, c"
e:interpolate("$(FOO:p-I)") -> "-Ia -Ib -Ic"
Missing keys trigger errors unless a default value is specified.
]==]--
-- Class table for environments; also serves as their metatable.
local envclass = {}

-- Instantiate an environment. parent (optional) provides inherited
-- variable lookup; assignments (optional) seeds initial bindings via
-- set_many; obj (optional) is reused as the instance storage.
function envclass:create(parent, assignments, obj)
  local env = obj or {}
  setmetatable(env, self)
  self.__index = self
  env.parent = parent
  env.vars = {}                  -- own (non-inherited) bindings
  env.lookup = { env.vars }
  env.cached_interpolation = {}  -- memoized interpolation results
  env.memos = {}
  env.memo_keys = {}
  -- Children start with a copy of the parent's external vars.
  env.external_vars = parent and util.clone_table(parent.external_vars) or nil
  -- assign initial bindings
  if assignments then
    env:set_many(assignments)
  end
  return env
end
-- Derive a child environment inheriting from this one, optionally seeded
-- with the given assignments (see envclass:create).
function envclass:clone(assignments)
return envclass:create(self, assignments)
end
-- Register fn as the implicit build rule for files with the given
-- extension (leading dot optional; stored with the dot). docstring is
-- kept alongside for documentation purposes.
function envclass:register_implicit_make_fn(ext, fn, docstring)
  if type(ext) ~= "string" then
    errorf("extension must be a string")
  end
  if type(fn) ~= "function" then
    errorf("fn must be a function")
  end
  if ext:sub(1, 1) ~= "." then
    ext = "." .. ext -- we want the dot in the extension
  end
  local exts = self._implicit_exts
  if not exts then
    exts = {}
    self._implicit_exts = exts
  end
  exts[ext] = {
    Function = fn,
    Doc = docstring or "",
  }
end
-- Look up the implicit build rule for filename by its extension, walking
-- this environment and then its ancestors. Returns the function or nil.
function envclass:get_implicit_make_fn(filename)
  local ext = path.get_extension(filename)
  local env = self
  repeat
    local exts = env._implicit_exts
    local entry = exts and exts[ext]
    if entry then
      return entry.Function
    end
    env = env.parent
  until env == nil
  return nil
end
-- True when key is bound in this environment or any ancestor.
function envclass:has_key(key)
  local env = self
  while env ~= nil do
    if env.vars[key] ~= nil then
      return true
    end
    env = env.parent
  end
  return false
end
-- Accessor for this environment's own (non-inherited) bindings table.
function envclass:get_vars()
  return self.vars
end

-- Bind every key/value pair of the given table via envclass:set.
function envclass:set_many(bindings)
  for key, value in pairs(bindings) do
    self:set(key, value)
  end
end
-- Append value (must be a string) to the list bound at key. An inherited
-- list is copied first so the parent's binding is never mutated; an
-- unbound key becomes a fresh one-element list.
function envclass:append(key, value)
  if type(value) ~= "string" then
    error("environment append: " .. util.tostring(value) .. " is not a string", 2)
  end
  self:invalidate_memos(key)
  -- The default of 1 is a non-table sentinel marking "unbound".
  local existing = self:get_list(key, 1)
  if type(existing) ~= "table" then
    self.vars[key] = { value }
  else
    local copy = util.clone_array(existing)
    copy[#copy + 1] = value
    self.vars[key] = copy
  end
end
-- Append each value in data to its corresponding key's list.
function envclass:append_many(data)
  for key, value in pairs(data) do
    self:append(key, value)
  end
end

-- Overwrite the binding for key with value (a string is promoted to a
-- one-element list), without consulting inherited bindings.
function envclass:replace(key, value)
  if type(value) == "string" then
    value = { value }
  end
  assert(type(value) == "table")
  self:invalidate_memos(key)
  self.vars[key] = value
end
-- Drop cached state that may depend on key: the entire interpolation
-- cache plus any memoized values recorded against key (record_memo_var).
function envclass:invalidate_memos(key)
  self.cached_interpolation = {}
  local names = self.memo_keys[key]
  if names then
    for name in pairs(names) do
      self.memos[name] = nil
    end
  end
end
-- Bind key to value only when no binding exists here or in an ancestor.
function envclass:set_default(key, value)
  if self:has_key(key) then
    return
  end
  self:set(key, value)
end

-- set_default applied to every pair of the given table.
function envclass:set_default_many(defaults)
  for key, value in pairs(defaults) do
    self:set_default(key, value)
  end
end
-- Bind `key` to `value`, invalidating dependent memos. A string value
-- becomes a one-element list (an empty string becomes an empty list);
-- a table value is defensively copied. Raises on a bad key, on values
-- that are neither string nor table, and on non-string list items.
function envclass:set(key, value)
  self:invalidate_memos(key)
  -- Check the type first so key:len() cannot blow up on a non-string key.
  assert(type(key) == "string", "key must be a string")
  assert(key:len() > 0, "key must not be empty")
  if type(value) == "string" then
    if value:len() > 0 then
      self.vars[key] = { value }
    else
      -- let empty strings make empty tables
      self.vars[key] = {}
    end
  elseif type(value) == "table" then
    -- FIXME: should filter out empty values
    for _, v in ipairs(value) do
      -- BUGFIX: was 'not type(v) == "string"', which can never be true
      -- because 'not' binds tighter than '==' ((not type(v)) == "string").
      if type(v) ~= "string" then
        error("key " .. key .. "'s table value contains non-string value " .. tostring(v))
      end
    end
    self.vars[key] = util.clone_array(value)
  else
    error("key " .. key .. "'s value is neither table nor string: " .. tostring(value))
  end
end
-- Return this environment's id field.
function envclass:get_id()
  return self.id
end
-- Fetch `key` as a single space-joined string, consulting the parent
-- chain and then `default`. Raises when the key is unbound everywhere
-- and no default was supplied.
function envclass:get(key, default)
  local list = self.vars[key]
  if list then
    return table.concat(list, " ")
  end
  if self.parent then
    return self.parent:get(key, default)
  end
  if default then
    return default
  end
  error(string.format("key '%s' not present in environment", key))
end
-- Fetch the raw list bound to `key`, consulting the parent chain and
-- then `default`. The returned table is shared; callers must not
-- mutate it.
function envclass:get_list(key, default)
  local list = self.vars[key]
  if list then
    return list -- FIXME: this should be immutable from the outside
  end
  if self.parent then
    return self.parent:get_list(key, default)
  end
  if default then
    return default
  end
  if not key then
    error("nil key is not allowed")
  end
  error(string.format("key '%s' not present in environment", key))
end
-- Return the parent environment (nil at the root of the chain).
function envclass:get_parent()
  return self.parent
end
-- Expand variable references in `str` against this environment. The
-- base expansion is cached per input string; when an extra `vars`
-- table is given, a second (uncached) pass is run with those bindings.
function envclass:interpolate(str, vars)
  local base = self.cached_interpolation[str]
  if not base then
    base = nenv.interpolate(str, self)
    self.cached_interpolation[str] = base
  end
  if not vars then
    return base
  end
  return nenv.interpolate(base, self, vars)
end
-- Module-level constructor; forwards to envclass:create.
function create(parent, assignments, obj)
  return envclass:create(parent, assignments, obj)
end
-- Remember that memo `name` depends on variable `key`, so that
-- invalidate_memos(key) can drop it later.
function envclass:record_memo_var(key, name)
  local names = self.memo_keys[key]
  if not names then
    names = {}
    self.memo_keys[key] = names
  end
  names[name] = true
end
-- Return the memoized value for `name`, computing it via `fn` on the
-- first call and registering a dependency on variable `key`.
function envclass:memoize(key, name, fn)
  local value = self.memos[name]
  if value then
    return value
  end
  self:record_memo_var(key, name)
  value = fn()
  self.memos[name] = value
  return value
end
-- Look up an external (OS-level) environment variable override,
-- walking this environment and its parents, falling back to the real
-- process environment via os.getenv.
function envclass:get_external_env_var(key)
  local chain = self
  while chain do
    -- BUGFIX: was 'self.external_vars', which ignored overrides set on
    -- parent environments even though the loop walked the chain.
    local t = chain.external_vars
    if t then
      local v = t[key]
      if v then return v end
    end
    chain = chain.parent
  end
  return os.getenv(key)
end
-- Override an external (OS-level) environment variable for this
-- environment (read back via get_external_env_var).
function envclass:set_external_env_var(key, value)
  if not self.external_vars then
    self.external_vars = {}
  end
  self.external_vars[key] = value
end
-- Queue a setup function to be run for this environment by
-- run_setup_functions.
function envclass:add_setup_function(fn)
  local funcs = self.setup_funcs or {}
  self.setup_funcs = funcs
  funcs[#funcs + 1] = fn
end
-- Run all globally registered setup functions, then every setup
-- function registered on this environment and its parents, passing
-- this environment to each.
function envclass:run_setup_functions()
  for _, func in ipairs(global_setup) do
    func(self)
  end
  -- BUGFIX: removed stray 't = self.setup_funcs' — it assigned an
  -- accidental (non-local) variable and its value was never used.
  local chain = self
  while chain do
    for _, func in util.nil_ipairs(chain.setup_funcs) do
      func(self)
    end
    chain = chain.parent
  end
end
-- Register a setup function to run for every environment (invoked by
-- envclass:run_setup_functions before per-environment setup).
function add_global_setup(fn)
  global_setup[#global_setup + 1] = fn
end
-- True when `datum` is an environment instance, identified by its
-- metatable being envclass.
function is_environment(datum)
  return getmetatable(datum) == envclass
end

View File

@ -0,0 +1,11 @@
module(..., package.seeall)
-- Host-level defaults for a Unix-like host: run .NET programs through
-- mono, no program suffix, ".so" shared libraries, POSIX cp/ln for
-- file copy and hard-link commands.
function apply_host(env)
  local host_defaults = {
    DOTNETRUN = "mono ",
    HOSTPROGSUFFIX = "",
    HOSTSHLIBSUFFIX = ".so",
    _COPY_FILE = "cp -f $(<) $(@)",
    _HARDLINK_FILE = "ln -f $(<) $(@)",
  }
  env:set_many(host_defaults)
end

View File

@ -0,0 +1,11 @@
module(..., package.seeall)
-- Host-level defaults for a Unix-like host: mono for .NET programs,
-- no program suffix, ".so" shared libraries, POSIX cp/ln commands.
function apply_host(env)
  local host_defaults = {
    DOTNETRUN = "mono ",
    HOSTPROGSUFFIX = "",
    HOSTSHLIBSUFFIX = ".so",
    _COPY_FILE = "cp -f $(<) $(@)",
    _HARDLINK_FILE = "ln -f $(<) $(@)",
  }
  env:set_many(host_defaults)
end

View File

@ -0,0 +1,11 @@
module(..., package.seeall)
-- Host-level defaults for a Darwin-style host: mono for .NET programs,
-- no program suffix, ".dylib" shared libraries, POSIX cp/ln commands.
function apply_host(env)
  local host_defaults = {
    DOTNETRUN = "mono ",
    HOSTPROGSUFFIX = "",
    HOSTSHLIBSUFFIX = ".dylib",
    _COPY_FILE = "cp -f $(<) $(@)",
    _HARDLINK_FILE = "ln -f $(<) $(@)",
  }
  env:set_many(host_defaults)
end

View File

@ -0,0 +1,11 @@
module(..., package.seeall)
-- Host-level defaults for a Unix-like host: mono for .NET programs,
-- no program suffix, ".so" shared libraries, POSIX cp/ln commands.
function apply_host(env)
  local host_defaults = {
    DOTNETRUN = "mono ",
    HOSTPROGSUFFIX = "",
    HOSTSHLIBSUFFIX = ".so",
    _COPY_FILE = "cp -f $(<) $(@)",
    _HARDLINK_FILE = "ln -f $(<) $(@)",
  }
  env:set_many(host_defaults)
end

View File

@ -0,0 +1,11 @@
module(..., package.seeall)
-- Host-level defaults for a Windows host: .NET programs run directly,
-- ".exe"/".dll" suffixes, cmd.exe copy commands for file operations.
function apply_host(env)
  local host_defaults = {
    DOTNETRUN = "",
    HOSTPROGSUFFIX = ".exe",
    HOSTSHLIBSUFFIX = ".dll",
    _COPY_FILE = "copy $(<) $(@)",
    _HARDLINK_FILE = "copy /f $(<) $(@)",
  }
  env:set_many(host_defaults)
end

View File

@ -0,0 +1,864 @@
module(..., package.seeall)
local native = require "tundra.native"
local nodegen = require "tundra.nodegen"
local path = require "tundra.path"
local util = require "tundra.util"
-- Line terminator for all generated files (module-scoped, not local,
-- so hook implementations can read it).
LF = '\r\n'
local UTF_HEADER = '\239\187\191' -- byte mark EF BB BF
-- Visual Studio solution format version and year; both are replaced
-- via setup() for other VS releases.
local VERSION_NUMBER = "12.00"
local VERSION_YEAR = "2012"
-- Optional caller hooks (global_properties / pre_sources / post_sources),
-- installed via setup().
local HOOKS = {}
local msvc_generator = {}
msvc_generator.__index = msvc_generator
-- Unit keywords that yield a Visual Studio project of their own.
local project_types = util.make_lookup_table {
  "Program", "SharedLibrary", "StaticLibrary", "CSharpExe", "CSharpLib", "ObjGroup",
}
local toplevel_stuff = util.make_lookup_table {
  ".exe", ".lib", ".dll",
}
-- Extensions that are build products, never listed as sources.
local binary_extension = util.make_lookup_table {
  ".exe", ".lib", ".dll", ".pdb", ".res", ".obj", ".o", ".a",
}
-- Extensions treated as C/C++ headers when collecting sources.
local header_exts = util.make_lookup_table {
  ".h", ".hpp", ".hh", ".inl",
}
-- Walk a DAG node collecting source-like files into `sources`; files
-- produced by the build are also recorded in `generated`. Recursion
-- stops at nodes present in `dag_lut`, since those belong to a
-- different top-level unit.
local function get_sources(dag, sources, generated, level, dag_lut)
  for _, output in util.nil_ipairs(dag.outputs) do
    if not binary_extension[path.get_extension(output)] then
      generated[output] = true
      sources[output] = true -- pick up generated headers
    end
  end
  for _, input in util.nil_ipairs(dag.inputs) do
    if not binary_extension[path.get_extension(input)] then
      sources[input] = true
    end
  end
  for _, dep in util.nil_ipairs(dag.deps) do
    if not dag_lut[dep] then -- don't go into other top-level DAGs
      get_sources(dep, sources, generated, level + 1, dag_lut)
    end
  end
end
-- Derive a stable, uppercase GUID in 8-4-4-4-12 form by slicing the
-- digest native.digest_guid computes for `data`.
function get_guid_string(data)
  local digest = native.digest_guid(data)
  local parts = {
    digest:sub(1, 8),
    digest:sub(9, 12),
    digest:sub(13, 16),
    digest:sub(17, 20),
    digest:sub(21, 32),
  }
  local guid = table.concat(parts, '-')
  assert(#guid == 36)
  return guid:upper()
end
-- Collect header files declared in `unit` (by extension) into
-- `source_lut`, recursing into dependencies that are not themselves
-- top-level units. `dag_lut` maps top-level dag nodes to their units;
-- `name_to_dags` maps unit names to arrays of dag nodes.
local function get_headers(unit, source_lut, dag_lut, name_to_dags)
  local src_dir = ''
  if not unit.Decl then
    -- Ignore ExternalLibrary and similar that have no data.
    return
  end
  if unit.Decl.SourceDir then
    src_dir = unit.Decl.SourceDir .. '/'
  end
  for _, src in util.nil_ipairs(nodegen.flatten_list('*-*-*-*', unit.Decl.Sources)) do
    if type(src) == "string" then
      local ext = path.get_extension(src)
      if header_exts[ext] then
        local full_path = path.normalize(src_dir .. src)
        source_lut[full_path] = true
      end
    end
  end
  -- A dependency counts as top-level when it names a known unit or one
  -- of its dag nodes is a top-level node.
  local function toplevel(u)
    if type(u) == "string" then
      return type(name_to_dags[u]) ~= "nil"
    end
    for _, dag in pairs(u.Decl.__DagNodes) do
      if dag_lut[dag] then
        return true
      end
    end
    return false
  end
  -- Repeat for dependencies ObjGroups
  for _, dep in util.nil_ipairs(nodegen.flatten_list('*-*-*-*', unit.Decl.Depends)) do
    if not toplevel(dep) then
      -- BUGFIX: forward name_to_dags; it was dropped on recursion, so
      -- toplevel() indexed nil for string deps below the first level.
      get_headers(dep, source_lut, dag_lut, name_to_dags)
    end
  end
end
-- Fill in the standard fields of a synthetic "meta" project record
-- (regenerate-projects / build-all helpers) rooted at `base_dir`, and
-- return the same table.
local function make_meta_project(base_dir, data)
  data.Guid = get_guid_string(data.Name)
  data.IdeGenerationHints = { Msvc = { SolutionFolder = "Build System Meta" } }
  data.IsMeta = true
  data.RelativeFilename = data.Name .. ".vcxproj"
  data.Filename = base_dir .. data.RelativeFilename
  data.Type = "meta"
  data.Sources = data.Sources or {}
  return data
end
-- Build a quoted tundra command line that runs from the current
-- working directory with the given extra arguments.
local function tundra_cmdline(args)
  return string.format('"%s" -C "%s" %s', TundraExePath, native.getcwd(), args)
end
-- Command line for the meta project that re-runs IDE file generation.
local function project_regen_commandline(ide_script)
  return tundra_cmdline("-g " .. ide_script)
end
-- Partition the raw unit list into Visual Studio project and solution
-- records. Returns (solutions, projects): each solution record carries
-- Filename/Projects/ExternalProjects/BuildSolutionProject; each project
-- record carries Name/Guid/Filename/Sources and, for the unit that owns
-- the project, DagNodes/Unit. Synthetic meta projects (regenerate, and
-- per-solution "Build This Solution") are appended to `projects`.
local function make_project_data(units_raw, env, proj_extension, hints, ide_script)
  -- Filter out stuff we don't care about.
  local units = util.filter(units_raw, function (u)
    return u.Decl.Name and project_types[u.Keyword]
  end)
  local base_dir = hints.MsvcSolutionDir and (hints.MsvcSolutionDir .. '\\') or env:interpolate('$(OBJECTROOT)$(SEP)')
  native.mkdir(base_dir)
  local project_by_name = {}
  local all_sources = {}
  local dag_node_lut = {} -- lookup table of all named, top-level DAG nodes
  local name_to_dags = {} -- table mapping unit name to array of dag nodes (for configs)
  -- Map out all top-level DAG nodes
  for _, unit in ipairs(units) do
    local decl = unit.Decl
    local dag_nodes = assert(decl.__DagNodes, "no dag nodes for " .. decl.Name)
    for build_id, dag_node in pairs(dag_nodes) do
      dag_node_lut[dag_node] = unit
      local array = name_to_dags[decl.Name]
      if not array then
        array = {}
        name_to_dags[decl.Name] = array
      end
      array[#array + 1] = dag_node
    end
  end
  -- Lazily create the record for the project a unit's sources go into.
  local function get_output_project(name)
    if not project_by_name[name] then
      local relative_fn = name .. proj_extension
      project_by_name[name] = {
        Name = name,
        Sources = {},
        RelativeFilename = relative_fn,
        Filename = base_dir .. relative_fn,
        Guid = get_guid_string(name),
        BuildByDefault = hints.BuildAllByDefault,
      }
    end
    return project_by_name[name]
  end
  -- Sort units based on dependency complexity. We want to visit the leaf nodes
  -- first so that any source file references are picked up as close to the
  -- bottom of the dependency chain as possible.
  local unit_weights = {}
  for _, unit in ipairs(units) do
    local decl = unit.Decl
    local stack = { }
    for _, dag in pairs(decl.__DagNodes) do
      stack[#stack + 1] = dag
    end
    local weight = 0
    while #stack > 0 do
      local node = table.remove(stack)
      if dag_node_lut[node] then
        weight = weight + 1
      end
      for _, dep in util.nil_ipairs(node.deps) do
        stack[#stack + 1] = dep
      end
    end
    unit_weights[unit] = weight
  end
  table.sort(units, function (a, b)
    return unit_weights[a] < unit_weights[b]
  end)
  -- Keep track of what source files have already been grabbed by other projects.
  local grabbed_sources = {}
  for _, unit in ipairs(units) do
    local decl = unit.Decl
    local name = decl.Name
    local source_lut = {}
    local generated_lut = {}
    for build_id, dag_node in pairs(decl.__DagNodes) do
      get_sources(dag_node, source_lut, generated_lut, 0, dag_node_lut)
    end
    -- Explicitly add all header files too as they are not picked up from the DAG
    -- Also pick up headers from non-toplevel DAGs we're depending on
    get_headers(unit, source_lut, dag_node_lut, name_to_dags)
    -- Figure out which project should get this data.
    local output_name = name
    local ide_hints = unit.Decl.IdeGenerationHints
    if ide_hints then
      if ide_hints.OutputProject then
        output_name = ide_hints.OutputProject
      end
    end
    local proj = get_output_project(output_name)
    if output_name == name then
      -- This unit is the real thing for this project, not something that's
      -- just being merged into it (like an ObjGroup). Set some more attributes.
      proj.IdeGenerationHints = ide_hints
      proj.DagNodes = decl.__DagNodes
      proj.Unit = unit
    end
    for src, _ in pairs(source_lut) do
      local norm_src = path.normalize(src)
      if not grabbed_sources[norm_src] then
        grabbed_sources[norm_src] = unit
        local is_generated = generated_lut[src]
        proj.Sources[#proj.Sources+1] = {
          Path = norm_src,
          Generated = is_generated,
        }
      end
    end
  end
  -- Get all accessed Lua files
  local accessed_lua_files = util.table_keys(get_accessed_files())
  -- Filter out the ones that belong to this build (exclude ones coming from Tundra)
  local function is_non_tundra_lua_file(p)
    return not path.is_absolute(p)
  end
  local function make_src_node(p)
    return { Path = path.normalize(p) }
  end
  local source_list = util.map(util.filter(accessed_lua_files, is_non_tundra_lua_file), make_src_node)
  local solution_hints = hints.MsvcSolutions
  if not solution_hints then
    print("No IdeGenerationHints.MsvcSolutions specified - using defaults")
    solution_hints = {
      ['tundra-generated.sln'] = {}
    }
  end
  local projects = util.table_values(project_by_name)
  local vanilla_projects = util.clone_array(projects)
  local solutions = {}
  -- Create meta project to regenerate solutions/projects. Added to every solution.
  local regen_meta_proj = make_meta_project(base_dir, {
    Name = "00-Regenerate-Projects",
    FriendlyName = "Regenerate Solutions and Projects",
    BuildCommand = project_regen_commandline(ide_script),
  })
  projects[#projects + 1] = regen_meta_proj
  for name, data in pairs(solution_hints) do
    local sln_projects
    local ext_projects = {}
    if data.Projects then
      sln_projects = {}
      for _, pname in ipairs(data.Projects) do
        local pp = project_by_name[pname]
        if not pp then
          errorf("can't find project %s for inclusion in %s -- check your MsvcSolutions data", pname, name)
        end
        sln_projects[#sln_projects + 1] = pp
      end
    else
      -- All the projects (that are not meta)
      sln_projects = util.clone_array(vanilla_projects)
    end
    for _, ext in util.nil_ipairs(data.ExternalProjects) do
      ext_projects[#ext_projects + 1] = ext
    end
    local meta_proj = make_meta_project(base_dir, {
      Name = "00-tundra-" .. path.drop_suffix(name),
      FriendlyName = "Build This Solution",
      BuildByDefault = true,
      Sources = source_list,
      BuildProjects = util.clone_array(sln_projects),
    })
    sln_projects[#sln_projects + 1] = regen_meta_proj
    sln_projects[#sln_projects + 1] = meta_proj
    projects[#projects + 1] = meta_proj
    solutions[#solutions + 1] = {
      Filename = base_dir .. name,
      Projects = sln_projects,
      ExternalProjects = ext_projects,
      BuildSolutionProject = meta_proj,
    }
  end
  return solutions, projects
end
-- Extensions emitted as <ClInclude> items in the .vcxproj; everything
-- else defaults to <ClCompile>.
local cl_tags = {
  ['.h'] = 'ClInclude',
  ['.hh'] = 'ClInclude',
  ['.hpp'] = 'ClInclude',
  ['.inl'] = 'ClInclude',
}
-- Read a whole file in binary mode; returns the empty string when the
-- file cannot be opened.
local function slurp_file(fn)
  local fh = io.open(fn, 'rb')
  if not fh then
    return ''
  end
  local data = fh:read("*all")
  fh:close()
  return data
end
-- Move freshly generated `new_fn` over `old_fn` only when the contents
-- differ; an identical file is discarded so timestamps are untouched
-- (avoids needless Visual Studio reload prompts).
local function replace_if_changed(new_fn, old_fn)
  if slurp_file(old_fn) == slurp_file(new_fn) then
    os.remove(new_fn)
    return
  end
  printf("Updating %s", old_fn)
  os.remove(old_fn)
  os.rename(new_fn, old_fn)
end
-- Write the .sln file: project entries, external projects (made to
-- depend on this solution's "Build This Solution" meta project),
-- solution folders, and the configuration/platform matrix. Output goes
-- to <fn>.tmp and is moved over <fn> only when the contents changed.
function msvc_generator:generate_solution(fn, projects, ext_projects, solution)
  local sln = io.open(fn .. '.tmp', 'wb')
  sln:write(UTF_HEADER, LF, "Microsoft Visual Studio Solution File, Format Version ", VERSION_NUMBER, LF, "# Visual Studio ", VERSION_YEAR, LF)
  -- Map folder names to array of projects under that folder
  local sln_folders = {}
  for _, proj in ipairs(projects) do
    local hints = proj.IdeGenerationHints
    local msvc_hints = hints and hints.Msvc or nil
    local folder = msvc_hints and msvc_hints.SolutionFolder or nil
    if folder then
      local projects = sln_folders[folder] or {}
      projects[#projects + 1] = proj
      sln_folders[folder] = projects
    end
  end
  -- Regular project entries (the GUID is the C++ project type id).
  for _, proj in ipairs(projects) do
    local name = proj.Name
    local fname = proj.RelativeFilename
    local guid = proj.Guid
    sln:write(string.format('Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "%s", "%s", "{%s}"', name, fname, guid), LF)
    sln:write('EndProject', LF)
  end
  -- Dump external projects. Make them depend on everything in this solution being built by Tundra.
  for _, data in util.nil_ipairs(ext_projects) do
    local guid = data.Guid
    local fname = path.normalize(path.join(native.getcwd(), data.Filename))
    local name = path.get_filename_base(fname)
    sln:write(string.format('Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "%s", "%s", "{%s}"', name, fname, guid), LF)
    local build_sln_proj = solution.BuildSolutionProject
    if build_sln_proj then
      local meta_guid = build_sln_proj.Guid
      sln:write('\tProjectSection(ProjectDependencies) = postProject', LF)
      sln:write('\t\t{', meta_guid,'} = {', meta_guid,'}', LF)
      sln:write('\tEndProjectSection', LF)
    end
    sln:write('EndProject', LF)
  end
  -- Solution folder entries (folder type GUID).
  for folder_name, _ in pairs(sln_folders) do
    local folder_guid = get_guid_string("folder/" .. folder_name)
    sln:write(string.format('Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "%s", "%s", "{%s}"', folder_name, folder_name, folder_guid), LF)
    sln:write('EndProject', LF)
  end
  sln:write("Global", LF)
  sln:write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution", LF)
  for _, tuple in ipairs(self.config_tuples) do
    sln:write(string.format('\t\t%s = %s', tuple.MsvcName, tuple.MsvcName), LF)
  end
  sln:write("\tEndGlobalSection", LF)
  sln:write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution", LF)
  for _, proj in ipairs(projects) do
    for _, tuple in ipairs(self.config_tuples) do
      local leader = string.format('\t\t{%s}.%s.', proj.Guid, tuple.MsvcName)
      sln:write(leader, "ActiveCfg = ", tuple.MsvcName, LF)
      if proj.BuildByDefault then
        sln:write(leader, "Build.0 = ", tuple.MsvcName, LF)
      end
    end
  end
  -- External projects build by default, and after Tundra is done (depends on "Build this solution").
  for _, proj in util.nil_ipairs(ext_projects) do
    for _, tuple in ipairs(self.config_tuples) do
      local leader = string.format('\t\t{%s}.%s.', proj.Guid, tuple.MsvcName)
      sln:write(leader, "ActiveCfg = ", tuple.MsvcName, LF)
      if not proj.Platform or proj.Platform == tuple.MsvcPlatform then
        sln:write(leader, "Build.0 = ", tuple.MsvcName, LF)
      end
    end
  end
  sln:write("\tEndGlobalSection", LF)
  sln:write("\tGlobalSection(SolutionProperties) = preSolution", LF)
  sln:write("\t\tHideSolutionNode = FALSE", LF)
  sln:write("\tEndGlobalSection", LF)
  sln:write("\tGlobalSection(NestedProjects) = preSolution", LF)
  for folder_name, projects in pairs(sln_folders) do
    local folder_guid = get_guid_string("folder/" .. folder_name)
    for _, project in ipairs(projects) do
      sln:write(string.format('\t\t{%s} = {%s}', project.Guid, folder_guid), LF)
    end
  end
  sln:write("\tEndGlobalSection", LF)
  sln:write("EndGlobal", LF)
  sln:close()
  replace_if_changed(fn .. ".tmp", fn)
end
-- Find the dag node in `project` matching a config tuple's build id
-- ("config-variant-subvariant"). Returns nil for projects without dag
-- nodes (meta projects); raises, listing the available ids, when the
-- requested config is missing.
local function find_dag_node_for_config(project, tuple)
  local build_id = string.format("%s-%s-%s", tuple.Config.Name, tuple.Variant.Name, tuple.SubVariant)
  local nodes = project.DagNodes
  if not nodes then
    return nil
  end
  if nodes[build_id] then
    return nodes[build_id]
  end
  -- BUGFIX: DagNodes is keyed by build-id strings, so '#nodes' was
  -- always 0 in the error message; count the hash keys instead.
  local node_names = util.table_keys(nodes)
  errorf("couldn't find config %s for project %s (%d dag nodes) - available: %s",
    build_id, project.Name, #node_names, table.concat(node_names, ", "))
end
-- Write the Makefile-flavored .vcxproj for `project`: configuration
-- list, globals, per-config NMake build/clean/rebuild command lines
-- invoking tundra (meta projects use their explicit BuildCommand), and
-- Intellisense include paths/defines pulled from the dag node's source
-- environment. Output goes to <fn>.tmp and is swapped in on change.
function msvc_generator:generate_project(project, all_projects)
  local fn = project.Filename
  local p = assert(io.open(fn .. ".tmp", 'wb'))
  p:write('<?xml version="1.0" encoding="utf-8"?>', LF)
  p:write('<Project')
  p:write(' DefaultTargets="Build"')
  p:write(' ToolsVersion="4.0"')
  p:write(' xmlns="http://schemas.microsoft.com/developer/msbuild/2003"')
  p:write('>', LF)
  -- List all project configurations
  p:write('\t<ItemGroup Label="ProjectConfigurations">', LF)
  for _, tuple in ipairs(self.config_tuples) do
    p:write('\t\t<ProjectConfiguration Include="', tuple.MsvcName, '">', LF)
    p:write('\t\t\t<Configuration>', tuple.MsvcConfiguration, '</Configuration>', LF)
    p:write('\t\t\t<Platform>', tuple.MsvcPlatform, '</Platform>', LF)
    p:write('\t\t</ProjectConfiguration>', LF)
  end
  p:write('\t</ItemGroup>', LF)
  p:write('\t<PropertyGroup Label="Globals">', LF)
  p:write('\t\t<ProjectGuid>{', project.Guid, '}</ProjectGuid>', LF)
  p:write('\t\t<Keyword>MakeFileProj</Keyword>', LF)
  if project.FriendlyName then
    p:write('\t\t<ProjectName>', project.FriendlyName, '</ProjectName>', LF)
  end
  if HOOKS.global_properties then
    HOOKS.global_properties(p, project)
  end
  p:write('\t</PropertyGroup>', LF)
  p:write('\t<PropertyGroup>', LF)
  if VERSION_YEAR == '2012' then
    p:write('\t\t<_ProjectFileVersion>10.0.30319.1</_ProjectFileVersion>', LF)
  end
  p:write('\t</PropertyGroup>', LF)
  -- NOTE(review): '\M' below is not a standard Lua escape; Lua 5.1 emits
  -- a plain 'M' (dropping the backslash), so the emitted path relies on
  -- $(VCTargetsPath) ending with a separator, and Lua 5.2+ rejects the
  -- escape at compile time. Presumably '\\' was intended — confirm
  -- against upstream before changing the generated output.
  p:write('\t<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />', LF)
  -- Mark all project configurations as makefile-type projects
  for _, tuple in ipairs(self.config_tuples) do
    p:write('\t<PropertyGroup Condition="\'$(Configuration)|$(Platform)\'==\'', tuple.MsvcName, '\'" Label="Configuration">', LF)
    p:write('\t\t<ConfigurationType>Makefile</ConfigurationType>', LF)
    p:write('\t\t<UseDebugLibraries>true</UseDebugLibraries>', LF) -- I have no idea what this setting affects
    if VERSION_YEAR == '2012' then
      p:write('\t\t<PlatformToolset>v110</PlatformToolset>', LF) -- I have no idea what this setting affects
    elseif VERSION_YEAR == '2013' then
      p:write('\t\t<PlatformToolset>v120</PlatformToolset>', LF) -- I have no idea what this setting affects
    end
    p:write('\t</PropertyGroup>', LF)
  end
  p:write('\t<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />', LF)
  for _, tuple in ipairs(self.config_tuples) do
    p:write('\t<PropertyGroup Condition="\'$(Configuration)|$(Platform)\'==\'', tuple.MsvcName, '\'">', LF)
    local dag_node = find_dag_node_for_config(project, tuple)
    local include_paths, defines
    if dag_node then
      local env = dag_node.src_env
      -- Make Intellisense include paths absolute.
      local paths = util.map(env:get_list("CPPPATH"), function (p)
        local ip = path.normalize(env:interpolate(p))
        if not path.is_absolute(ip) then
          ip = native.getcwd() .. '\\' .. ip
        end
        return ip
      end)
      include_paths = table.concat(paths, ';')
      local ext_paths = env:get_external_env_var('INCLUDE')
      if ext_paths then
        include_paths = include_paths .. ';' .. ext_paths
      end
      defines = env:interpolate("$(CPPDEFS:j;)")
    else
      include_paths = ''
      defines = ''
    end
    local root_dir = native.getcwd()
    local build_id = string.format("%s-%s-%s", tuple.Config.Name, tuple.Variant.Name, tuple.SubVariant)
    local base = "\"" .. TundraExePath .. "\" -C \"" .. root_dir .. "\" "
    local build_cmd = base .. build_id
    local clean_cmd = base .. "--clean " .. build_id
    local rebuild_cmd = base .. "--rebuild " .. build_id
    if project.BuildCommand then
      -- Meta projects with an explicit command (e.g. regenerate projects).
      build_cmd = project.BuildCommand
      clean_cmd = ""
      rebuild_cmd = ""
    elseif not project.IsMeta then
      build_cmd = build_cmd .. " " .. project.Name
      clean_cmd = clean_cmd .. " " .. project.Name
      rebuild_cmd = rebuild_cmd .. " " .. project.Name
    else
      -- "Build This Solution" meta project: build all member projects.
      local all_projs_str = table.concat(
        util.map(assert(project.BuildProjects), function (p) return p.Name end), ' ')
      build_cmd = build_cmd .. " " .. all_projs_str
      clean_cmd = clean_cmd .. " " .. all_projs_str
      rebuild_cmd = rebuild_cmd .. " " .. all_projs_str
    end
    p:write('\t\t<NMakeBuildCommandLine>', build_cmd, '</NMakeBuildCommandLine>', LF)
    p:write('\t\t<NMakeOutput></NMakeOutput>', LF)
    p:write('\t\t<NMakeCleanCommandLine>', clean_cmd, '</NMakeCleanCommandLine>', LF)
    p:write('\t\t<NMakeReBuildCommandLine>', rebuild_cmd, '</NMakeReBuildCommandLine>', LF)
    p:write('\t\t<NMakePreprocessorDefinitions>', defines, ';$(NMakePreprocessorDefinitions)</NMakePreprocessorDefinitions>', LF)
    p:write('\t\t<NMakeIncludeSearchPath>', include_paths, ';$(NMakeIncludeSearchPath)</NMakeIncludeSearchPath>', LF)
    p:write('\t\t<NMakeForcedIncludes>$(NMakeForcedIncludes)</NMakeForcedIncludes>', LF)
    p:write('\t</PropertyGroup>', LF)
  end
  if HOOKS.pre_sources then
    HOOKS.pre_sources(p, project)
  end
  -- Emit list of source files
  p:write('\t<ItemGroup>', LF)
  for _, record in ipairs(project.Sources) do
    local path_str = assert(record.Path)
    if not path.is_absolute(path_str) then
      path_str = native.getcwd() .. '\\' .. path_str
    end
    local ext = path.get_extension(path_str)
    local cl_tag = cl_tags[ext] or 'ClCompile'
    p:write('\t\t<', cl_tag,' Include="', path_str, '" />', LF)
  end
  p:write('\t</ItemGroup>', LF)
  local post_src_hook = HOOKS.post_sources
  if post_src_hook then
    post_src_hook(p, project)
  end
  p:write('\t<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />', LF)
  if VERSION_YEAR == "2012" then
    -- Import helper msbuild stuff to make build aborting work propertly in VS2012
    local xml = path.normalize(TundraScriptDir .. '/tundra/ide/msvc-rules.xml')
    p:write('\t<Import Project="', xml, '" />', LF)
  end
  p:write('</Project>', LF)
  p:close()
  replace_if_changed(fn .. ".tmp", fn)
end
-- Compute the longest common directory prefix (backslash-terminated)
-- shared by all relative source paths in `sources`; absolute paths are
-- skipped. Returns '' when there is no common prefix.
local function get_common_dir(sources)
  local dir_tokens = {}
  for _, src in ipairs(sources) do
    -- Renamed from 'path' to avoid shadowing the path module.
    local src_path = assert(src.Path)
    if not tundra.path.is_absolute(src_path) then
      local subdirs = {}
      -- BUGFIX/portability: the pattern literal contained the invalid
      -- escape '\]' (rejected outright by Lua 5.2+); this spelling
      -- produces the identical pattern '([^\]+)\' portably.
      for subdir in src_path:gmatch("([^\\]+)\\") do
        subdirs[#subdirs + 1] = subdir
      end
      if #dir_tokens == 0 then
        dir_tokens = subdirs
      else
        -- Trim dir_tokens back to the longest prefix shared with subdirs.
        for i = 1, #dir_tokens do
          if dir_tokens[i] ~= subdirs[i] then
            while #dir_tokens >= i do
              table.remove(dir_tokens)
            end
            break
          end
        end
      end
    end
  end
  local result = table.concat(dir_tokens, '\\')
  if #result > 0 then
    result = result .. '\\'
  end
  return result
end
-- Write <Filename>.filters, grouping sources into Visual Studio
-- filters derived from each file's directory relative to the common
-- source prefix; build-generated files land under "Generated Files".
-- Output is written to a .tmp file and swapped in only on change.
function msvc_generator:generate_project_filters(project)
  local fn = project.Filename .. ".filters"
  local p = assert(io.open(fn .. ".tmp", 'wb'))
  p:write('<?xml version="1.0" encoding="Windows-1252"?>', LF)
  p:write('<Project')
  p:write(' ToolsVersion="4.0"')
  p:write(' xmlns="http://schemas.microsoft.com/developer/msbuild/2003"')
  p:write('>', LF)
  local common_dir = get_common_dir(util.filter(project.Sources, function (s) return not s.Generated end))
  local common_dir_gen = get_common_dir(util.filter(project.Sources, function (s) return s.Generated end))
  local filters = {}
  local sources = {}
  -- Mangle source filenames, and find which filters need to be created
  for _, record in ipairs(project.Sources) do
    local fn = record.Path
    local common_start = record.Generated and common_dir_gen or common_dir
    if fn:find(common_start, 1, true) then
      fn = fn:sub(#common_start+1)
    end
    local dir, filename = path.split(fn)
    if dir == '.' then
      dir = nil
    end
    local abs_path = record.Path
    if not path.is_absolute(abs_path) then
      abs_path = native.getcwd() .. '\\' .. abs_path
    end
    if record.Generated then
      dir = 'Generated Files'
    end
    sources[#sources + 1] = {
      FullPath = abs_path,
      Directory = dir,
    }
    -- Register filter and all its parents
    while dir and dir ~= '.' do
      filters[dir] = true
      dir, _ = path.split(dir)
    end
  end
  -- Emit list of filters
  p:write('\t<ItemGroup>', LF)
  for filter_name, _ in pairs(filters) do
    if filter_name ~= "" then
      -- BUGFIX: was assigned without 'local', leaking an accidental
      -- global between invocations.
      local filter_guid = get_guid_string(filter_name)
      p:write('\t\t<Filter Include="', filter_name, '">', LF)
      p:write('\t\t\t<UniqueIdentifier>{', filter_guid, '}</UniqueIdentifier>', LF)
      p:write('\t\t</Filter>', LF)
    end
  end
  p:write('\t</ItemGroup>', LF)
  -- Emit list of source files
  p:write('\t<ItemGroup>', LF)
  for _, source in ipairs(sources) do
    local ext = path.get_extension(source.FullPath)
    local cl_tag = cl_tags[ext] or 'ClCompile'
    if not source.Directory then
      p:write('\t\t<', cl_tag, ' Include="', source.FullPath, '" />', LF)
    else
      p:write('\t\t<', cl_tag, ' Include="', source.FullPath, '">', LF)
      p:write('\t\t\t<Filter>', source.Directory, '</Filter>', LF)
      p:write('\t\t</', cl_tag, '>', LF)
    end
  end
  p:write('\t</ItemGroup>', LF)
  p:write('</Project>', LF)
  p:close()
  replace_if_changed(fn .. ".tmp", fn)
end
-- Write <Filename>.user with per-configuration debugger settings
-- pointing at the first ".exe" output of each config's dag node.
-- Never overwrites an existing .user file (it holds user settings).
function msvc_generator:generate_project_user(project)
  local fn = project.Filename .. ".user"
  -- Don't overwrite user settings
  do
    local p, err = io.open(fn, 'rb')
    if p then
      p:close()
      return
    end
  end
  local p = assert(io.open(fn, 'wb'))
  p:write('<?xml version="1.0" encoding="utf-8"?>', LF)
  p:write('<Project')
  p:write(' ToolsVersion="4.0"')
  p:write(' xmlns="http://schemas.microsoft.com/developer/msbuild/2003"')
  p:write('>', LF)
  for _, tuple in ipairs(self.config_tuples) do
    local dag_node = find_dag_node_for_config(project, tuple)
    if dag_node then
      local exe = nil
      -- NOTE(review): the pattern matches ".exe" anywhere in the output
      -- name, not only as a suffix; "%.exe$" may be the real intent —
      -- confirm before changing.
      for _, output in util.nil_ipairs(dag_node.outputs) do
        if output:match("%.exe") then
          exe = output
          break
        end
      end
      if exe then
        p:write('\t<PropertyGroup Condition="\'$(Configuration)|$(Platform)\'==\'', tuple.MsvcName, '\'">', LF)
        p:write('\t\t<LocalDebuggerCommand>', native.getcwd() .. '\\' .. exe, '</LocalDebuggerCommand>', LF)
        p:write('\t\t<DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>', LF)
        p:write('\t\t<LocalDebuggerWorkingDirectory>', native.getcwd(), '</LocalDebuggerWorkingDirectory>', LF)
        p:write('\t</PropertyGroup>', LF)
      end
    end
  end
  p:write('</Project>', LF)
  p:close()
end
-- Backend entry point: computes the MSVC "Configuration|Platform" name
-- for every config tuple (honoring IdeGenerationHints.Msvc FullMappings
-- / VariantMappings / PlatformMappings, defaulting the platform to
-- Win32 with a one-time warning), then writes all solutions, projects,
-- filter files and .user files.
function msvc_generator:generate_files(ngen, config_tuples, raw_nodes, env, default_names, hints, ide_script)
  assert(config_tuples and #config_tuples > 0)
  if not hints then
    hints = {}
  end
  local complained_mappings = {}
  self.msvc_platforms = {}
  local msvc_hints = hints.Msvc or {}
  local variant_mappings = msvc_hints.VariantMappings or {}
  local platform_mappings = msvc_hints.PlatformMappings or {}
  local full_mappings = msvc_hints.FullMappings or {}
  for _, tuple in ipairs(config_tuples) do
    local build_id = string.format("%s-%s-%s", tuple.Config.Name, tuple.Variant.Name, tuple.SubVariant)
    -- Precedence: full mapping, then variant (with and without
    -- subvariant suffix), then the raw variant name.
    if full_mappings[build_id] then
      local m = full_mappings[build_id]
      tuple.MsvcConfiguration = assert(m.Config)
      tuple.MsvcPlatform = assert(m.Platform)
    elseif variant_mappings[tuple.Variant.Name] then
      tuple.MsvcConfiguration = variant_mappings[tuple.Variant.Name]
    elseif variant_mappings[tuple.Variant.Name .. "-" .. tuple.SubVariant] then
      tuple.MsvcConfiguration = variant_mappings[tuple.Variant.Name .. "-" .. tuple.SubVariant]
    else
      tuple.MsvcConfiguration = tuple.Variant.Name
    end
    -- Use IdeGenerationHints.Msvc.PlatformMappings table to map tundra
    -- configurations to MSVC platform names. Note that this isn't a huge deal
    -- for building stuff as Tundra doesn't care about this setting. But it
    -- might influence the choice of debugger and affect include paths for
    -- things like Intellisense that certain users may care about.
    if not tuple.MsvcPlatform then
      tuple.MsvcPlatform = platform_mappings[tuple.Config.Name]
    end
    -- If we didn't find anything, warn and then default to Win32, which VS
    -- will always accept (or so one would assume)
    if not tuple.MsvcPlatform then
      tuple.MsvcPlatform = "Win32"
      if not complained_mappings[tuple.Config.Name] then
        printf("warning: No VS platform mapping for %s, mapping to Win32", tuple.Config.Name)
        print("(Add one to IdeGenerationHints.Msvc.PlatformMappings to override)")
        complained_mappings[tuple.Config.Name] = true
      end
    end
    tuple.MsvcName = tuple.MsvcConfiguration .. "|" .. tuple.MsvcPlatform
    self.msvc_platforms[tuple.MsvcPlatform] = true
  end
  self.config_tuples = config_tuples
  printf("Generating Visual Studio projects for %d configurations/variants", #config_tuples)
  -- Figure out where we're going to store the projects
  local solutions, projects = make_project_data(raw_nodes, env, ".vcxproj", hints, ide_script)
  local proj_lut = {}
  for _, p in ipairs(projects) do
    proj_lut[p.Name] = p
  end
  for _, sln in pairs(solutions) do
    self:generate_solution(sln.Filename, sln.Projects, sln.ExternalProjects, sln)
  end
  for _, proj in ipairs(projects) do
    self:generate_project(proj, projects)
    self:generate_project_filters(proj)
    self:generate_project_user(proj)
  end
end
-- Configure the generator for a specific Visual Studio release (format
-- version string plus year) with optional hooks, and register it as
-- the active IDE backend with nodegen.
function setup(version_short, version_year, hooks)
  VERSION_NUMBER = version_short
  VERSION_YEAR = version_year
  HOOKS = hooks or HOOKS
  nodegen.set_ide_backend(function(...)
    local state = setmetatable({}, msvc_generator)
    state:generate_files(...)
  end)
end

View File

@ -0,0 +1,173 @@
<!--
This file is an awful hack to create a nmake-like builder for tundra that
doesn't just kill the tool when you cancel the build.
-->
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<UsingTask TaskName="VCMessage" AssemblyName="Microsoft.Build.CppTasks.Common, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a" />
<!-- Cancellation-aware replacement for Exec: runs the command under
     "cmd /c" and, when MSBuild cancels the build, repeatedly sends
     CTRL+C to the console process group instead of hard-killing the
     tool host. -->
<UsingTask TaskName="SofterExec" TaskFactory="CodeTaskFactory" AssemblyFile="$(MSBuildToolsPath)\Microsoft.Build.Tasks.v4.0.dll">
<ParameterGroup>
<Command Required="true" />
</ParameterGroup>
<Task>
<Reference Include="Microsoft.Build.Utilities.v4.0" />
<Reference Include="Microsoft.Build.Tasks.v4.0" />
<Code Type="Class" Language="cs">
<![CDATA[
using System;
using Microsoft.Build.Utilities;
using Microsoft.Build.Framework;
using System.Diagnostics;
using System.Runtime.InteropServices;
// MSBuild task that launches the build command and cooperates with
// build cancellation (ICancelableTask).
public class SofterExec : Task, ICancelableTask
{
[DllImport("kernel32.dll", SetLastError=true)]
static extern bool GenerateConsoleCtrlEvent(ConsoleCtrlEvent sigevent, int dwProcessGroupId);
public enum ConsoleCtrlEvent
{
CTRL_C = 0,
CTRL_BREAK = 1,
CTRL_CLOSE = 2,
CTRL_LOGOFF = 5,
CTRL_SHUTDOWN = 6
}
public string Command { get; set; }
// Set from MSBuild's cancellation thread; polled by Execute().
private volatile bool m_Cancel = false;
public SofterExec()
{
}
public void Cancel()
{
m_Cancel = true;
}
public override bool Execute()
{
try
{
using (Process p = new Process())
{
p.StartInfo.FileName = "cmd";
p.StartInfo.Arguments = "/c \"" + Command + "\"";
p.StartInfo.UseShellExecute = false;
p.StartInfo.RedirectStandardOutput = true;
p.StartInfo.RedirectStandardError = true;
// Forward both output streams into the MSBuild log.
p.OutputDataReceived += (object sender, DataReceivedEventArgs line) => {
if (line.Data != null)
Log.LogMessageFromText(line.Data, MessageImportance.High);
};
p.ErrorDataReceived += (object sender, DataReceivedEventArgs line) => {
if (line.Data != null)
Log.LogMessageFromText(line.Data, MessageImportance.High);
};
p.Start();
p.BeginOutputReadLine();
p.BeginErrorReadLine();
while (!p.WaitForExit(100))
{
if (m_Cancel)
{
// Keep sending CTRL+C events - sometimes it takes more than one..
GenerateConsoleCtrlEvent(ConsoleCtrlEvent.CTRL_C, 0);
}
}
p.WaitForExit();
return m_Cancel ? false : p.ExitCode == 0;
}
}
catch(Exception e)
{
Console.WriteLine(e);
return false;
}
}
}
]]>
</Code>
</Task>
</UsingTask>
<Target Name="CoreClean">
<VCMessage Code="MSB8005" Type="Warning" Arguments="NMakeCleanCommandLine" Condition="'$(NMakeCleanCommandLine)'==''"/>
<SofterExec Command="$(NMakeCleanCommandLine)" Condition="'$(NMakeCleanCommandLine)'!=''"/>
</Target>
<Target Name="Build" DependsOnTargets="PrepareForNMakeBuild;ResolveReferences;GetTargetPath" Returns="$(NMakeManagedOutput)">
<VCMessage Code="MSB8005" Type="Warning" Arguments="NMakeBuildCommandLine" Condition="'$(NMakeBuildCommandLine)'==''"/>
<SofterExec Command="$(NMakeBuildCommandLine)" Condition="'$(NMakeBuildCommandLine)'!=''"/>
</Target>
<Target Name="Rebuild" DependsOnTargets="PrepareForNMakeBuild;Clean;ResolveReferences;GetTargetPath" Returns="$(NMakeManagedOutput)">
<VCMessage Code="MSB8005" Type="Warning" Arguments="NMakeReBuildCommandLine" Condition="'$(NMakeReBuildCommandLine)'==''"/>
<SofterExec Command="$(NMakeReBuildCommandLine)" Condition="'$(NMakeReBuildCommandLine)'!=''"/>
</Target>
<!-- *******************************************************************************************
GetResolved Native Targets
Since Makefile doesn't import Microsoft.common.targets or microsoft.cppbuild.targets,
it needs to have its own set of project to project reference targets.
******************************************************************************************* -->
<Target Name="GetResolvedLinkObjs" DependsOnTargets="GetNativeTargetPath" Returns="@(NMakeNativeOutput)" />
<Target Name="GetResolvedLinkLibs" DependsOnTargets="GetNativeTargetPath" Returns="@(NMakeNativeOutput)" />
<Target Name="GetResolvedXDCMake" DependsOnTargets="GetNativeTargetPath" Returns="@(NMakeNativeOutput)" />
<Target Name="GetCopyToOutputDirectoryItems" />
<Target Name="SetToGetNativeTargetPath" >
<ItemGroup>
<ProjectReference>
<Targets Condition="'%(Extension)' == '.vcxproj'">GetNativeTargetPath;%(Targets)</Targets>
</ProjectReference>
</ItemGroup>
</Target>
<Target Name="GetNativeTargetPath" Returns="@(NMakeNativeOutput)">
<ItemGroup>
<NMakeNativeOutput Condition="'$(CLRSupport)' == '' or '$(CLRSupport)' == 'false'" Include="$(TargetPath)" />
</ItemGroup>
<ItemGroup>
<NMakeNativeOutput Condition="'@(NMakeNativeOutput)' != ''" >
<FileType Condition="'%(NMakeNativeOutput.Extension)' == '.obj'">obj</FileType>
<FileType Condition="'%(NMakeNativeOutput.Extension)' == '.lib'">lib</FileType>
<FileType Condition="'%(NMakeNativeOutput.Extension)' == '.dll'">dll</FileType>
<FileType Condition="'%(NMakeNativeOutput.Extension)' == '.xdc'">xdc</FileType>
</NMakeNativeOutput>
</ItemGroup>
</Target>
<Target Name="GetTargetPath" Returns="$(NMakeManagedOutput)">
<PropertyGroup>
<NMakeManagedOutput Condition="'$(CLRSupport)' != '' and '$(CLRSupport)' != 'false'">$(TargetPath)</NMakeManagedOutput>
</PropertyGroup>
</Target>
<Target Name="GetNativeManifest" />
<!-- *******************************************************************************************
Property pages
******************************************************************************************* -->
<ItemGroup Condition="'$(UseDefaultPropertyPageSchemas)' != 'false'">
<PropertyPageSchema Include="$(VCTargetsPath)$(LangID)\ProjectItemsSchema.xml" />
<PropertyPageSchema Include="$(VCTargetsPath)$(LangID)\directories.xml" />
<PropertyPageSchema Include="$(VCTargetsPath)$(LangID)\debugger_*.xml" />
<PropertyPageSchema Include="$(VCTargetsPath)$(LangID)\nmake.xml" />
<!-- project only rules -->
<PropertyPageSchema Include="$(VCTargetsPath)$(LangID)\general_makefile.xml">
<Context>Project</Context>
</PropertyPageSchema>
<!-- Property sheet only rules -->
<PropertyPageSchema Include="$(VCTargetsPath)$(LangID)\general_makefile_ps.xml;$(VCTargetsPath)$(LangID)\usermacros.xml">
<Context>PropertySheet</Context>
</PropertyPageSchema>
</ItemGroup>
</Project>

View File

@ -0,0 +1,7 @@
-- Microsoft Visual Studio 2010 Solution/Project file generation
module(..., package.seeall)

local msvc_common = require "tundra.ide.msvc-common"

-- "11.00" is the .sln file-format version emitted for VS2010; "2010" is the
-- product-year tag. NOTE(review): semantics assumed from msvc-common.setup
-- -- confirm against that module.
msvc_common.setup("11.00", "2010")

View File

@ -0,0 +1,7 @@
-- Microsoft Visual Studio 2012 Solution/Project file generation
module(..., package.seeall)

local msvc_common = require "tundra.ide.msvc-common"

-- "12.00" is the .sln file-format version emitted for VS2012; "2012" is the
-- product-year tag (see msvc-common.setup).
msvc_common.setup("12.00", "2012")

View File

@ -0,0 +1,7 @@
-- Microsoft Visual Studio 2013 Solution/Project file generation
module(..., package.seeall)

local msvc_common = require "tundra.ide.msvc-common"

-- VS2013 kept solution file-format version "12.00"; only the product-year
-- tag differs from the 2012 generator.
msvc_common.setup("12.00", "2013")

View File

@ -0,0 +1,735 @@
-- Xcode 3 (works in 4 as well) Workspace/Project file generation
module(..., package.seeall)
local path = require "tundra.path"
local nodegen = require "tundra.nodegen"
local util = require "tundra.util"
local native = require "tundra.native"
-- Generator "class" table; methods are attached below and instances are
-- created via setmetatable({}, xcode_generator) in the backend hook.
-- (Fixed: the table was accidentally declared twice.)
local xcode_generator = {}
xcode_generator.__index = xcode_generator
-- Write an Xcode workspace contents file (XML) referencing each project's
-- .xcodeproj bundle by name.
-- Fixes: the output handle was never closed (leak / possibly unflushed data)
-- and io.open was unchecked.
function xcode_generator:generate_workspace(fn, projects)
  local sln = assert(io.open(fn, 'wb'))
  sln:write('<?xml version="1.0" encoding="UTF-8"?>\n')
  sln:write('<Workspace\n')
  sln:write('\tversion = "1.0">\n')
  for _, proj in ipairs(projects) do
    local name = proj.Decl.Name
    -- NOTE(review): fname is computed but never used below -- the FileRef is
    -- always rooted at the bare project name. Possibly an intended path
    -- prefix that was never wired in; confirm before removing or using it.
    local fname = proj.RelativeFilename
    if fname == '.' then
      fname = ''
    else
      fname = fname .. '/'
    end
    sln:write('\t<FileRef\n')
    sln:write('\t\tlocation = "group:', name .. '.xcodeproj">\n')
    sln:write('\t</FileRef>\n')
  end
  sln:write('</Workspace>\n')
  sln:close()
end
-- Unit keywords that map to real (native) Xcode targets.
local project_types = util.make_lookup_table {
  "Program", "SharedLibrary", "StaticLibrary",
}

-- Absolute path (ending in the platform separator) of tundra's object root;
-- the generated helper shell scripts are placed here.
local function get_absolute_output_path(env)
  local base_dir = env:interpolate('$(OBJECTROOT)$(SEP)')
  local cwd = native.getcwd()
  return cwd .. "/" .. base_dir
end
-- Hash arbitrary data to a 24-character Xcode-style object id: the digest
-- GUID with dashes stripped, truncated to 24 chars.
-- (Reworked to stop shadowing the global `string` library with a local.)
local function newid(data)
  local guid = native.digest_guid(data)
  return guid:gsub('-', ''):sub(1, 24)
end
-- Map of file extension -> Xcode lastKnownFileType identifier.
-- Hoisted to module scope so the table is built once instead of on every
-- call (the original rebuilt it per invocation).
local file_types = {
  [".c"] = "sourcecode.c.c",
  [".cc"] = "sourcecode.cpp.cpp",
  [".cpp"] = "sourcecode.cpp.cpp",
  [".css"] = "text.css",
  [".cxx"] = "sourcecode.cpp.cpp",
  [".framework"] = "wrapper.framework",
  [".gif"] = "image.gif",
  [".h"] = "sourcecode.c.h",
  [".html"] = "text.html",
  [".lua"] = "sourcecode.lua",
  [".m"] = "sourcecode.c.objc",
  [".mm"] = "sourcecode.cpp.objc",
  [".nib"] = "wrapper.nib",
  [".pch"] = "sourcecode.c.h",
  [".plist"] = "text.plist.xml",
  [".strings"] = "text.plist.strings",
  [".xib"] = "file.xib",
  [".icns"] = "image.icns",
  [""] = "compiled.mach-o.executable",
}

-- Resolve the Xcode file type for a filename; unknown extensions are "text".
local function getfiletype(name)
  return file_types[path.get_extension(name)] or "text"
end
-- Convert a tundra unit into the project record this generator works with,
-- or nil when the unit kind has no Xcode representation.
-- Returned record: { Type, Decl, Sources = map of id->filename,
--                    RelativeFilename, Guid }.
local function get_project_data(unit, env)
  local decl = unit.Decl

  if decl.Name and project_types[unit.Keyword] then
    local relative_fn = decl.Name
    -- Keep only plain string sources (filters out nested/generated entries).
    local sources = util.flatten(decl.Sources) or {}
    sources = util.filter(sources, function (x) return type(x) == "string" end)

    if decl.SourceDir then
      sources = util.map(sources, function (x) return decl.SourceDir .. x end)
    end

    local source_list = {}
    -- Rebuild source list keyed by the hashed ids the pbxproj layout needs.
    for _, fn in ipairs(sources) do
      source_list[newid(fn)] = fn
    end

    return {
      Type = unit.Keyword,
      Decl = decl,
      Sources = source_list,
      RelativeFilename = relative_fn,
      Guid = newid(decl.Name .. "ProjectId"),
    }
  elseif unit.Keyword == "OsxBundle" then
    -- NOTE(review): mutates the shared decl and uses a fixed name, so only
    -- one OsxBundle per build is representable -- confirm this is intended.
    decl.Name = "OsxBundle"
    local source_list = {}
    source_list[newid(decl.InfoPList)] = decl.InfoPList
    for _, resource in ipairs(decl.Resources) do
      if resource.Decl then
        source_list[newid(resource.Decl.Source)] = resource.Decl.Source
      end
    end
    return {
      Type = unit.Keyword,
      Decl = decl,
      Sources = source_list,
      RelativeFilename = "$(OBJECTDIR)/MyApp.app",
      Guid = newid("OsxBundle"),
    }
  else
    return nil
  end
end
-- Flatten an id->filename map into an array of { Key, Value } records,
-- ordered alphabetically by filename (hash tables have no stable order).
local function sort_filelist(source_list)
  local entries = {}
  for id, filename in pairs(source_list) do
    entries[#entries + 1] = { Key = id, Value = filename }
  end
  table.sort(entries, function(lhs, rhs) return lhs.Value < rhs.Value end)
  return entries
end
-- Emit the PBXFileReference section: one entry per unique source file across
-- all projects, plus one per Program for its executable output (which lives
-- in BUILT_PRODUCTS_DIR and is excluded from indexing).
-- NOTE(review): the section markers say "FBXFileReference"; Xcode's section
-- name is PBXFileReference -- confirm whether Xcode cares before changing.
local function write_file_refs(p, projects)
  p:write('/* Begin FBXFileReference section */\n')
  local cwd = native.getcwd();
  -- build the source list (ids are unique, so collisions just overwrite)
  local full_source_list = {}
  for _, project in pairs(projects) do
    local sources = project.Sources
    for key, fn in pairs(sources) do
      full_source_list[key] = fn
    end
    -- include executable names in the source list as well
    if project.Type == "Program" then
      full_source_list[newid(project.Decl.Name .. "Program")] = project.Decl.Name
    end
  end
  local source_list = {}
  -- As we can't sort hashtables we need to move this over to a regular table
  source_list = sort_filelist(full_source_list)
  for _, entry in pairs(source_list) do
    local key = entry.Key
    local fn = entry.Value
    local name = path.get_filename(fn)
    local file_type = getfiletype(fn)
    local str = ""
    if file_type == "compiled.mach-o.executable" then
      -- Executable product: relative path rooted in the build products dir.
      str = string.format('\t\t%s /* %s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = %s; name = "%s"; includeInIndex = 0; path = "%s"; sourceTree = BUILT_PRODUCTS_DIR; };',
      key, fn, file_type, name, fn)
    else
      -- Ordinary source file: absolute path under the current directory.
      str = string.format('\t\t%s /* %s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = %s; name = "%s"; path = "%s"; sourceTree = "<group>"; };',
      key, fn, file_type, name, path.join(cwd, fn))
    end
    p:write(str, '\n')
  end
  p:write('/* End FBXFileReference section */\n\n')
end
-- Emit the PBXLegacyTarget section: the meta targets (!BuildWorkspace /
-- !UpdateWorkspace) that run the generated helper scripts rather than a
-- native build phase. Non-meta projects are handled by write_native_targes.
local function write_legacy_targets(p, projects, env)
  p:write('/* Begin PBXLegacyTarget section */\n')
  local script_path = get_absolute_output_path(env)
  for _, project in pairs(projects) do
    local decl = project.Decl
    if project.IsMeta then
      --[[ Shape of the record being emitted (example from a Jam workspace):
      isa = PBXLegacyTarget;
      buildArgumentsString = "";
      buildConfigurationList = D7D12762170E4CF98A79B5EF /* Build configuration list for PBXLegacyTarget "!UpdateWorkspace" */;
      buildPhases = (
      );
      buildToolPath = /Users/danielcollin/unity_ps3/ps3/Projects/JamGenerated/_workspace.xcode_/updateworkspace;
      dependencies = (
      );
      name = "!UpdateWorkspace";
      passBuildSettingsInEnvironment = 1;
      productName = "!UpdateWorkspace";
      --]]
      p:write('\t\t', newid(decl.Name .. "Target"), ' /* ', decl.Name, ' */ = {\n')
      p:write('\t\t\tisa = PBXLegacyTarget;\n')
      p:write('\t\t\tbuildArgumentsString = "', project.MetaData.BuildArgs, '";\n')
      p:write('\t\t\tbuildConfigurationList = ', newid(decl.Name .. 'Config'), ' /* Build configuration list for PBXLegacyTarget "',decl.Name, '" */;\n')
      p:write('\t\t\tbuildPhases = (\n')
      p:write('\t\t\t);\n');
      -- The build tool is one of the shell scripts from generate_shellscript.
      p:write('\t\t\tbuildToolPath = ', script_path .. project.MetaData.BuildTool, ';\n')
      p:write('\t\t\tdependencies = (\n\t\t\t);\n')
      p:write('\t\t\tname = "', decl.Name, '";\n')
      -- Pass CONFIG/VARIANT/etc. build settings into the script's environment.
      p:write('\t\t\tpassBuildSettingsInEnvironment = 1;\n')
      p:write('\t\t\tproductName = "', decl.Name or "", '";\n')
      p:write('\t\t};\n')
    end
  end
  p:write('/* End PBXLegacyTarget section */\n')
end
-- Emit the PBXNativeTarget section: one native target per non-meta project,
-- each with a single ShellScript build phase delegating to tundra.
-- NOTE(review): the function name has a typo ("targes"); kept because the
-- call site uses the same spelling.
local function write_native_targes(p, projects)
  p:write('/* Begin PBXNativeTarget section */\n')
  -- tundra unit keyword -> Apple product type identifier
  local categories = {
    ["Program"] = "com.apple.product-type.tool",
    ["StaticLibrary"] = "com.apple.product-type.library.static",
    ["SharedLibrary"] = "com.apple.product-type.library.dynamic",
  }
  for _, project in pairs(projects) do
    local decl = project.Decl
    if not project.IsMeta then
      p:write('\t\t', newid(decl.Name .. "Target"), ' /* ', decl.Name, ' */ = {\n')
      p:write('\t\t\tisa = PBXNativeTarget;\n')
      p:write('\t\t\tbuildConfigurationList = ', newid(decl.Name .. 'Config'), ' /* Build configuration list for PBXNativeTarget "',decl.Name, '" */;\n')
      p:write('\t\t\tbuildPhases = (\n')
      p:write('\t\t\t\t', newid(decl.Name .. "ShellScript"), ' /* ShellScript */,\n')
      p:write('\t\t\t);\n');
      p:write('\t\t\tbuildRules = (\n\t\t\t);\n')
      p:write('\t\t\tdependencies = (\n\t\t\t);\n')
      p:write('\t\t\tname = "', decl.Name, '";\n')
      -- Fixed: pbxproj keys are camelCase -- was "ProductName", which is not
      -- a recognized key (the PBXLegacyTarget writer uses lowercase).
      p:write('\t\t\tproductName = "', decl.Name, '";\n')
      p:write('\t\t\tproductReference = ', newid(decl.Name .. "Program"), ' /* ', decl.Name, ' */;\n ')
      p:write('\t\t\tproductType = "', categories[project.Type] or "", '";\n')
      p:write('\t\t};\n')
    end
  end
  p:write('/* End PBXNativeTarget section */\n')
end
-- Standard pbxproj preamble: UTF-8 marker plus the root dictionary opened
-- up to (and including) the "objects" map.
local function write_header(p)
  local preamble = {
    '// !$*UTF8*$!\n',
    '{\n',
    '\tarchiveVersion = 1;\n',
    '\tclasses = {\n',
    '\t};\n',
    '\tobjectVersion = 45;\n',
    '\tobjects = {\n',
    '\n',
  }
  for _, line in ipairs(preamble) do
    p:write(line)
  end
end
-- Build the full project list: two meta (legacy shell-script) targets that
-- drive tundra itself, followed by one record per buildable unit.
local function get_projects(raw_nodes, env)
  local projects = {}

  -- Meta targets list tundra.lua (and units.lua, when present) as sources.
  local source_list = {}
  source_list[newid("tundra.lua")] = "tundra.lua"
  local units = io.open("units.lua")
  if units then
    source_list[newid("units.lua")] = "units.lua"
    io.close(units)
  end

  local meta_name = "!BuildWorkspace"
  projects[#projects + 1] = {
    Decl = { Name = meta_name, },
    Type = "LegacyTarget",
    RelativeFilename = "",
    Sources = source_list,
    Guid = newid(meta_name .. 'ProjectId'),
    IsMeta = true,
    MetaData = { BuildArgs = "'' $(CONFIG) $(VARIANT) $(SUBVARIANT) $(ACTION)",
                 BuildTool = "xcodetundra" },
  }

  local meta_name = "!UpdateWorkspace"
  projects[#projects + 1] = {
    -- Fixed: was a duplicated string literal; use meta_name consistently
    -- (matches the !BuildWorkspace record above).
    Decl = { Name = meta_name, },
    Type = "LegacyTarget",
    RelativeFilename = "",
    Sources = source_list,
    Guid = newid(meta_name .. 'ProjectId'),
    IsMeta = true,
    MetaData = { BuildArgs = "",
                 BuildTool = "xcodeupdateproj" },
  }

  for _, unit in ipairs(raw_nodes) do
    local data = get_project_data(unit, env)
    if data then projects[#projects + 1] = data; end
  end

  return projects
end
-- Split a path into (directory, filename). A name without any '/' or '\\'
-- separator yields "." as its directory.
local function split(fn)
  local dir, file = fn:match("^(.*)[/\\]([^\\/]*)$")
  if dir then
    return dir, file
  end
  return ".", fn
end
-- Split `str` on pattern `pat`, returning an array whose first element is
-- `name` followed by the non-empty pieces (a leading empty piece is dropped).
local function split_str(str, pat, name)
  local parts = { name }
  local piece_pat = "(.-)" .. pat
  local pos = 1
  local s, e, piece = str:find(piece_pat, 1)
  while s do
    if s ~= 1 or piece ~= "" then
      parts[#parts + 1] = piece
    end
    pos = e + 1
    s, e, piece = str:find(piece_pat, pos)
  end
  -- Trailing remainder after the last separator (if any).
  if pos <= #str then
    parts[#parts + 1] = str:sub(pos)
  end
  return parts
end
-- Concatenate entry[offset..end_count] (skipping nil slots) and hash the
-- result into an Xcode-style id.
-- (Reworked from repeated string concatenation to table.concat.)
local function build_name_id(entry, offset, end_count)
  local pieces = {}
  for i = offset, end_count do
    if entry[i] ~= nil then
      pieces[#pieces + 1] = entry[i]
    end
  end
  return newid(table.concat(pieces))
end
-- Return an indent string of (level + 1) tabs. The +1 is a historical quirk
-- of the original loop (it started from one tab) and is preserved.
local function make_indent(level)
  return string.rep('\t', level + 1)
end
-- Render a group's full path by walking its Parent chain to the root,
-- suffixed with the group's id: "a/b/c : <Key>".
local function make_full_path( grp )
  local segments = { grp.Name }
  local node = grp.Parent
  while node ~= nil do
    table.insert(segments, 1, node.Name)
    node = node.Parent
  end
  return table.concat(segments, '/') .. ' : ' .. grp.Key
end
-- Emit one PBXGroup record for group `g`: directory children are listed
-- first, then file children, each sub-list sorted by name.
local function write_group_ref(p, g, full_path)
  p:write('\t\t', g.Key, ' /* ', full_path .. '/' .. g.Name, ' */ = {\n')
  p:write('\t\t\tisa = PBXGroup;\n')
  p:write('\t\t\tchildren = (\n')
  -- Partition children into directories and plain files so they can be
  -- sorted and emitted as two separate runs.
  local dirs = {}
  local files = {}
  for _, ref in pairs(g.Children) do
    if ref.IsDir then
      local key = ref.Key
      dirs[#dirs + 1] = { Key = key, Name = ref.Name }
    else
      local key = ref.Key
      files[#files + 1] = { Key = key, Name = ref.Name }
    end
  end
  table.sort(dirs, function(a, b) return a.Name < b.Name end)
  table.sort(files, function(a, b) return a.Name < b.Name end)
  for _, ref in pairs(dirs) do
    p:write(string.format('\t\t\t\t%s /* %s */,\n', ref.Key, full_path .. '/' .. ref.Name))
  end
  for _, ref in pairs(files) do
    p:write(string.format('\t\t\t\t%s /* %s */,\n', ref.Key, full_path .. '/' .. ref.Name))
  end
  p:write('\t\t\t);\n')
  p:write('\t\t\tname = "', g.Name, '"; \n');
  p:write('\t\t\tsourceTree = "<group>";\n');
  p:write('\t\t};\n')
end
-- Depth-first walk over the group tree: recurse into each child first, then
-- emit a PBXGroup record for any child that itself has children.
-- (Cleaned up: removed unused locals `c`, `indent` -- and the side-effect-free
-- make_indent call that fed it -- and `local_path`, whose value was never read.
-- `level` is kept for call compatibility.)
local function print_children_2(p, children, path, level)
  if children == nil then
    return path
  end
  for _, child in pairs(children) do
    print_children_2(p, child.Children, path .. '/' .. child.Name, level + 1)
    if #child.Children ~= 0 then
      write_group_ref(p, child, path)
    end
  end
  return path
end
-- Depth-first search for a group node whose Name equals `group` and whose
-- Parent is reference-equal to `parent`; returns nil when absent.
local function find_group(groups, group, parent)
  if groups == nil then
    return nil
  end
  for _, candidate in pairs(groups) do
    if candidate.Name == group and candidate.Parent == parent then
      return candidate
    end
    local nested = find_group(candidate.Children, group, parent)
    if nested then
      return nested
    end
  end
  return nil
end
-- Build the PBXGroup hierarchy for one project's sources: each path
-- component becomes a directory group (shared across files), files attach to
-- their deepest directory, then the tree is emitted via print_children_2.
-- `parent` is the project guid used as the root group's key.
local function write_sources(p, children, name, parent)
  local filelist = sort_filelist(children)
  local groups = {};
  -- Root group carries the project name and the project's guid as its key.
  table.insert(groups, {Name = name, Parent = nil, Key = parent, Children = {} })
  for _, entry in pairs(filelist) do
    local parent_group = nil
    -- NOTE(review): this local `path` shadows the required `path` module
    -- inside the function body.
    local path, filename = split(entry.Value)
    local split_path = split_str(path, "/", name)
    for i=1 , #split_path, 1 do
      if split_path[i] ~= '.' then
        local grp = find_group(groups, split_path[i], parent_group)
        if grp == nil then
          -- Key mixes the parent's identity in so same-named directories
          -- under different parents get distinct ids.
          grp = { IsDir = true, Name=split_path[i], Parent=parent_group, Key=newid(util.tostring(parent_group)..split_path[i]), Children={} }
          if parent_group == nil then
            table.insert(groups, grp)
          else
            -- NOTE(review): this reassignment looks like a no-op (grp.Parent
            -- was just set to parent_group above) -- confirm before simplifying.
            parent_group = grp.Parent
            table.insert(parent_group.Children, grp)
          end
        end
        parent_group = grp
      end
    end
    -- Attach the file itself to the deepest directory group found/created.
    if parent_group ~= nil then
      table.insert(parent_group.Children, { IsDir = false, Name=filename, Parent=parent_group, Key = entry.Key, Children = {}} )
    end
  end
  print_children_2(p, groups, '.', 0);
end
-- Emit the PBXGroup section: each project's source tree (via write_sources)
-- followed by one root group, "AllTargets.workspace", that collects every
-- project; write_project points mainGroup at it.
local function write_groups(p, projects)
  p:write('/* Begin PBXGroup section */\n')
  local all_targets_name = "AllTargets.workspace"
  local all_targets_id = newid(all_targets_name)
  for _, project in pairs(projects) do
    write_sources(p, project.Sources, project.Decl.Name, project.Guid)
  end
  -- write last group that links the projects names above
  p:write('\t\t', all_targets_id, ' /* ', all_targets_name, ' */ = {\n')
  p:write('\t\t\tisa = PBXGroup;\n')
  p:write('\t\t\tchildren = (\n')
  for _, project in pairs(projects) do
    p:write(string.format('\t\t\t\t%s /* %s */,\n', project.Guid, project.Decl.Name))
  end
  p:write('\t\t\t);\n')
  p:write('\t\t\tname = "', all_targets_name, '"; \n');
  p:write('\t\t\tsourceTree = "<group>";\n');
  p:write('\t\t};\n')
  p:write('/* End PBXGroup section */\n\n')
end
-- Emit the single PBXProject object: references the AllTargets main group,
-- the project-level configuration list, and one target entry per project.
-- Its id ("ProjectObject") matches the rootObject written by write_footer.
local function write_project(p, projects)
  local all_targets_name = "AllTargets.workspace"
  local all_targets_id = newid(all_targets_name)
  local project_id = newid("ProjectObject")
  local project_config_list_id = newid("ProjectObjectConfigList")
  p:write('/* Begin PBXProject section */\n')
  p:write('\t\t', project_id, ' /* Project object */ = {\n')
  p:write('\t\t\tisa = PBXProject;\n')
  p:write('\t\t\tbuildConfigurationList = ', project_config_list_id, ' /* Build configuration list for PBXProject "', "Project Object", '" */;\n')
  p:write('\t\t\tcompatibilityVersion = "Xcode 3.1";\n')
  p:write('\t\t\thasScannedForEncodings = 1;\n')
  p:write('\t\t\tmainGroup = ', all_targets_id, ' /* ', all_targets_name, ' */;\n')
  p:write('\t\t\tprojectDirPath = "";\n')
  p:write('\t\t\tprojectRoot = "";\n')
  p:write('\t\t\ttargets = (\n')
  for _, project in pairs(projects) do
    p:write(string.format('\t\t\t\t%s /* %s */,\n', newid(project.Decl.Name .. "Target"), project.Decl.Name))
  end
  p:write('\t\t\t);\n')
  p:write('\t\t};\n')
  p:write('/* End PBXProject section */\n')
end
-- Emit the PBXShellScriptBuildPhase section: each non-meta target gets one
-- script phase that invokes the generated xcodetundra wrapper with the
-- target/config build settings Xcode passes through the environment.
local function write_shellscripts(p, projects, env)
  p:write('/* Begin PBXShellScriptBuildPhase section */\n')
  -- TODO: Do we really need to repeat this for all projects? seems a bit wasteful
  local xcodetundra_filename = get_absolute_output_path(env) .. "xcodetundra"
  for _, project in pairs(projects) do
    local name = project.Decl.Name
    if not project.IsMeta then
      p:write('\t\t', newid(name .. "ShellScript"), ' /* ShellScript */ = {\n')
      p:write('\t\t\tisa = PBXShellScriptBuildPhase;\n')
      p:write('\t\t\tbuildActionMask = 2147483647;\n')
      p:write('\t\t\tfiles = (\n')
      p:write('\t\t\t);\n')
      p:write('\t\t\tinputPaths = (\n')
      p:write('\t\t\t);\n')
      p:write('\t\t\toutputPaths = (\n')
      p:write('\t\t\t);\n')
      p:write('\t\t\trunOnlyForDeploymentPostprocessing = 0;\n')
      p:write('\t\t\tshellPath = /bin/sh;\n')
      p:write('\t\t\tshellScript = "', xcodetundra_filename, ' $TARGET_NAME $CONFIG $VARIANT $SUBVARIANT $ACTION -v";\n')
      p:write('\t\t};\n')
    end
  end
  -- Fixed: the closing marker said "Begin" instead of "End".
  p:write('/* End PBXShellScriptBuildPhase section */\n')
end
-- Canonical tuple name: "<config>-<variant>-<subvariant>".
local function get_full_config_name(config)
  return string.format("%s-%s-%s",
    config.Config.Name, config.Variant.Name, config.SubVariant)
end
-- Emit the XCBuildConfiguration entries: one per (project, config tuple),
-- carrying CONFIG/VARIANT/SUBVARIANT as build settings so the shell-script
-- phases can hand them back to tundra.
-- NOTE(review): the Begin/End markers say "XCConfigurationList" although the
-- records are XCBuildConfiguration -- confirm whether Xcode cares.
local function write_configs(p, projects, config_tuples, env)
  p:write('/* Begin XCConfigurationList section */\n')
  -- I wonder if we really need to do it this way for all configs?
  for __, project in ipairs(projects) do
    for _, tuple in ipairs(config_tuples) do
      local full_config_name = get_full_config_name(tuple)
      -- A tuple is macosx-native if macosx is a supported or default host.
      local is_macosx_native = false
      for _, host in util.nil_ipairs(tuple.Config.SupportedHosts) do
        if host == "macosx" then
          is_macosx_native = true
        end
      end
      if "macosx" == tuple.Config.DefaultOnHost then
        is_macosx_native = true
      end
      local config_id = newid(project.Decl.Name .. full_config_name)
      p:write('\t\t', config_id, ' = {\n')
      p:write('\t\t\tisa = XCBuildConfiguration;\n')
      -- Don't add anything extra if subvariant is default
      p:write('\t\t\tbuildSettings = {\n')
      if is_macosx_native then
        p:write('\t\t\t\tARCHS = "$(NATIVE_ARCH_ACTUAL)";\n')
      end
      p:write('\t\t\t\tVARIANT = "', tuple.Variant.Name, '";\n')
      p:write('\t\t\t\tCONFIG = "', tuple.Config.Name, '";\n')
      p:write('\t\t\t\tSUBVARIANT = "', tuple.SubVariant, '";\n')
      if is_macosx_native and not project.IsMeta then
        p:write('\t\t\t\tCONFIGURATION_BUILD_DIR = "', full_config_name, '";\n')
      end
      -- this is a little hack to get xcode to clean the whole output folder when using "FullBuild"
      p:write('\t\t\t\tPRODUCT_NAME = "',project.Decl.Name , '";\n')
      p:write('\t\t\t\tTARGET_NAME = "',project.Decl.Name , '";\n')
      p:write('\t\t\t};\n')
      p:write('\t\t\tname = "',full_config_name , '";\n')
      p:write('\t\t};\n')
    end
  end
  p:write('/* End XCConfigurationList section */\n')
end
-- Emit the XCConfigurationList section: one list per project referencing the
-- per-tuple XCBuildConfiguration records, with a macosx-*/debug tuple chosen
-- as the default configuration when available.
local function write_config_list(p, projects, config_tuples)
  p:write('/* Begin XCConfigurationList section */\n')
  local default_config = ""
  -- find the default config: prefer a config named "macosx-*" in debug
  for _, tuple in ipairs(config_tuples) do
    -- Fixed: the pattern was '^(%macosx)%-'; '%m' is not a valid pattern
    -- class (it only worked on Lua 5.1 by matching a literal 'm').
    local is_macosx_native = tuple.Config.Name:match('^(macosx)%-')
    if is_macosx_native and tuple.Variant.Name == "debug" then
      default_config = get_full_config_name(tuple)
      break
    end
  end
  -- if we didn't find a default config just grab the first one
  if default_config == "" then
    -- Fixed: was config_tuples[0]; Lua arrays are 1-based, so [0] is always
    -- nil and this fallback crashed.
    default_config = get_full_config_name(config_tuples[1])
  end
  for __, project in ipairs(projects) do
    local config_id = newid(project.Decl.Name .. 'Config')
    p:write('\t\t', config_id, ' /* Build config list for "', project.Decl.Name, '" */ = {\n')
    p:write('\t\t\tisa = XCConfigurationList;\n')
    p:write('\t\t\tbuildConfigurations = (\n')
    for _, tuple in ipairs(config_tuples) do
      local full_config_name = get_full_config_name(tuple)
      p:write(string.format('\t\t\t\t%s /* %s */,\n', newid(project.Decl.Name .. full_config_name), full_config_name))
    end
    p:write('\t\t\t);\n')
    p:write('\t\t\tdefaultConfigurationIsVisible = 1;\n')
    p:write('\t\t\tdefaultConfigurationName = "', default_config, '";\n')
    p:write('\t\t};\n')
  end
  p:write('/* End XCConfigurationList section */\n')
end
-- Close the "objects" map and the root dictionary, pointing rootObject at
-- the PBXProject entry written by write_project (same "ProjectObject" id).
local function write_footer(p)
  p:write('\t};\n')
  p:write('\trootObject = ', newid("ProjectObject"), ' /* Project object */;\n')
  p:write('}\n')
end
-- Write the two executable helper scripts the Xcode targets invoke:
--   xcodetundra     - maps Xcode's build arguments onto a tundra invocation
--   xcodeupdateproj - regenerates the Xcode project files
local function generate_shellscript(env)
  local filename = path.join(get_absolute_output_path(env), "xcodetundra")
  local p = assert(io.open(filename, 'wb'))
  -- Fixed: shebang was "#/bin/sh" (missing the '!').
  p:write("#!/bin/sh\n")
  p:write("TARGET_NAME=$1\n")
  p:write("CONFIG=$2\n")
  p:write("VARIANT=$3\n")
  p:write("SUBVARIANT=$4\n")
  p:write("ACTION=$5\n")
  -- Translate Xcode's action verb into tundra flags.
  p:write('if [ "$5" = "clean" ]; then\n')
  p:write(' ACTION="-c"\n')
  p:write("fi\n\n")
  p:write('if [ "$5" = "build" ]; then\n')
  p:write(' ACTION=""\n')
  p:write("fi\n\n")
  p:write(TundraExePath .. " --full-paths $TARGET_NAME $CONFIG-$VARIANT-$SUBVARIANT $ACTION -v\n")
  p:close()
  os.execute("chmod +x " .. filename)

  local filename = path.join(get_absolute_output_path(env), "xcodeupdateproj")
  -- Fixed: this open was unchecked (the first one already used assert).
  local p = assert(io.open(filename, 'wb'))
  p:write("#!/bin/sh\n")
  p:write(TundraExePath .. " --ide-gen xcode3 -a\n")
  p:close()
  os.execute("chmod +x " .. filename)
end
-- Backend entry point: writes the helper scripts and the complete
-- project.pbxproj covering every config tuple and unit.
-- Fixes: the pbxproj handle was never closed and io.open was unchecked.
function xcode_generator:generate_files(ngen, config_tuples, raw_nodes, env, default_names)
  assert(config_tuples and #config_tuples > 0)
  -- TODO: Set the first default config as default
  local base_dir = env:interpolate('$(OBJECTROOT)$(SEP)')
  local xcodeproj_dir = base_dir .. "tundra-generated.xcodeproj/"
  native.mkdir(base_dir)
  native.mkdir(xcodeproj_dir)
  generate_shellscript(env)
  local p = assert(io.open(path.join(xcodeproj_dir, "project.pbxproj"), 'wb'))
  local projects = get_projects(raw_nodes, env)
  -- Emit the pbxproj sections in file order.
  write_header(p)
  write_file_refs(p, projects)
  write_groups(p, projects)
  write_legacy_targets(p, projects, env)
  write_native_targes(p, projects)
  write_project(p, projects)
  write_shellscripts(p, projects, env)
  write_configs(p, projects, config_tuples, env)
  write_config_list(p, projects, config_tuples)
  write_footer(p)
  p:close()
end
-- Register this generator with tundra's nodegen so it is used when the
-- xcode3 IDE backend is requested.
nodegen.set_ide_backend(function(...)
  local state = setmetatable({}, xcode_generator)
  state:generate_files(...)
end)

View File

@ -0,0 +1,924 @@
-- Xcode 5 Workspace/Project file generation
module(..., package.seeall)
local path = require "tundra.path"
local nodegen = require "tundra.nodegen"
local util = require "tundra.util"
local native = require "tundra.native"
local xcode_generator = {}
xcode_generator.__index = xcode_generator
-- Write an Xcode workspace file referencing one <name>.xcodeproj per project.
--
-- fn       - output filename
-- projects - list of project descriptors (only Decl.Name is used)
function xcode_generator:generate_workspace(fn, projects)
  local sln = assert(io.open(fn, 'wb'))
  sln:write('<?xml version="1.0" encoding="UTF-8"?>\n')
  sln:write('<Workspace\n')
  sln:write('\tversion = "1.0">\n')

  for _, proj in ipairs(projects) do
    -- NOTE(review): the original also computed a path prefix from
    -- proj.RelativeFilename but never used it; that dead code is removed here.
    local name = proj.Decl.Name
    sln:write('\t<FileRef\n')
    sln:write('\t\tlocation = "group:', name .. '.xcodeproj">\n')
    sln:write('\t</FileRef>\n')
  end

  sln:write('</Workspace>\n')

  -- BUG FIX: close the handle instead of leaking it to the GC.
  sln:close()
end
-- Unit keywords that yield a buildable Xcode project.
local project_types = util.make_lookup_table {
"Program", "SharedLibrary", "StaticLibrary",
}
-- Extensions of final build products.
-- NOTE(review): appears unused in this chunk -- confirm before removing.
local toplevel_stuff = util.make_lookup_table {
".exe", ".lib", ".dll",
}
-- Extensions of binary artifacts that must not be listed as source files
-- (the empty string covers extensionless executables).
local binary_extension = util.make_lookup_table {
"", ".obj", ".o", ".a",
}
-- Header-file extensions; headers are added explicitly since they don't
-- appear as DAG inputs.
local header_exts = util.make_lookup_table {
".h", ".hpp", ".hh", ".inl",
}
-- Produce a 24-character identifier from a stable digest of `data`,
-- mimicking the style of Xcode object UIDs.
local function newid(data)
  -- BUG FIX (style): the original stored the digest in a local named
  -- `string`, shadowing the standard string library; use a distinct name.
  local digest = native.digest_guid(data)
  -- Strip the dashes and truncate to 24 chars to match the Xcode UID style.
  return (digest:gsub('-', '')):sub(1, 24)
end
-- Map of file extension -> Xcode "lastKnownFileType" identifier.
-- The empty-extension entry marks extensionless outputs as mach-o executables.
local file_types = {
[".c"] = "sourcecode.c.c",
[".cc"] = "sourcecode.cpp.cpp",
[".cpp"] = "sourcecode.cpp.cpp",
[".css"] = "text.css",
[".cxx"] = "sourcecode.cpp.cpp",
[".framework"] = "wrapper.framework",
[".gif"] = "image.gif",
[".h"] = "sourcecode.c.h",
[".html"] = "text.html",
[".lua"] = "sourcecode.lua",
[".m"] = "sourcecode.c.objc",
[".mm"] = "sourcecode.cpp.objc",
[".nib"] = "wrapper.nib",
[".pch"] = "sourcecode.c.h",
[".plist"] = "text.plist.xml",
[".strings"] = "text.plist.strings",
[".xib"] = "file.xib",
[".icns"] = "image.icns",
[""] = "compiled.mach-o.executable",
}
-- Look up the Xcode file type for a filename, defaulting to plain text.
local function getfiletype(name)
  local ext = path.get_extension(name)
  return file_types[ext] or "text"
end
-- Scan for sources, following dependencies until those dependencies seem to be a different top-level unit
-- Collect source filenames reachable from `dag` into the `sources` set
-- (a table keyed by path). Non-binary outputs (e.g. generated headers) are
-- also recorded in `generated`. `dag_lut` contains all top-level DAG nodes;
-- recursion stops at them so each unit only claims its own files.
local function get_sources(dag, sources, generated, dag_lut)
for _, output in ipairs(dag.outputs) do
local ext = path.get_extension(output)
if not binary_extension[ext] then
generated[output] = true
sources[output] = true -- pick up generated headers
end
end
for _, input in ipairs(dag.inputs) do
local ext = path.get_extension(input)
if not binary_extension[ext] then
sources[input] = true
end
end
for _, dep in util.nil_ipairs(dag.deps) do
if not dag_lut[dep] then -- don't go into other top-level DAGs
get_sources(dep, sources, generated, dag_lut)
end
end
end
-- Collect header files for `unit` into the `sources` set. Headers never
-- appear as DAG inputs, so they are picked up from the unit's Sources
-- declaration, then the scan recurses into non-top-level dependencies
-- (e.g. ObjGroups).
--
-- unit         - unit whose Decl is inspected (units without a Decl are skipped)
-- sources      - set (table keyed by path) to fill in
-- dag_lut      - lookup of top-level DAG nodes (recursion boundary)
-- name_to_dags - map of unit name -> DAG nodes, used to recognize top-level deps
local function get_headers(unit, sources, dag_lut, name_to_dags)
  local src_dir = ''

  if not unit.Decl then
    -- Ignore ExternalLibrary and similar that have no data.
    return
  end

  if unit.Decl.SourceDir then
    src_dir = unit.Decl.SourceDir .. '/'
  end

  for _, src in util.nil_ipairs(nodegen.flatten_list('*-*-*-*', unit.Decl.Sources)) do
    if type(src) == "string" then
      local ext = path.get_extension(src)
      if header_exts[ext] then
        local full_path = path.normalize(src_dir .. src)
        sources[full_path] = true
      end
    end
  end

  -- A dependency is "top-level" when it names a known unit or owns one of the
  -- top-level DAG nodes; such deps are handled by their own project.
  local function toplevel(u)
    if type(u) == "string" then
      return type(name_to_dags[u]) ~= "nil"
    end

    for _, dag in pairs(u.Decl.__DagNodes) do
      if dag_lut[dag] then
        return true
      end
    end
    return false
  end

  -- Repeat for dependencies ObjGroups
  for _, dep in util.nil_ipairs(nodegen.flatten_list('*-*-*-*', unit.Decl.Depends)) do
    if not toplevel(dep) then
      -- BUG FIX: the recursive call previously dropped `name_to_dags`, making
      -- toplevel() index a nil upvalue for string deps below the first level.
      get_headers(dep, sources, dag_lut, name_to_dags)
    end
  end
end
-- Convert an id -> filename map into an array of { Key, Value } records
-- sorted by filename, since hash tables cannot be sorted directly.
local function sort_filelist(source_list)
  local result = {}
  for id, filename in pairs(source_list) do
    result[#result + 1] = { Key = id, Value = filename }
  end
  table.sort(result, function(lhs, rhs) return lhs.Value < rhs.Value end)
  return result
end
-- Emit the PBXFileReference section: one reference per unique source file
-- across all projects, plus one per Program's executable product.
-- NOTE(review): the section label says "FBXFileReference"; Xcode treats the
-- /* */ labels as comments, but PBX is the conventional spelling -- confirm
-- before changing emitted bytes.
local function write_file_refs(p, projects)
p:write('/* Begin FBXFileReference section */\n')
local cwd = native.getcwd();
-- build the source list
local full_source_list = {}
for _, project in ipairs(projects) do
local sources = project.Sources
for key, fn in pairs(sources) do
full_source_list[key] = fn
end
-- include executable names in the source list as well
if project.Type == "Program" then
full_source_list[newid(project.Decl.Name .. "Program")] = project.Decl.Name
end
end
local source_list = {}
-- As we can't sort hashtables we need to move this over to a regular table
source_list = sort_filelist(full_source_list)
for _, entry in pairs(source_list) do
local key = entry.Key
local fn = entry.Value
local name = path.get_filename(fn)
local file_type = getfiletype(fn)
local str = ""
-- Executable products live in BUILT_PRODUCTS_DIR and are excluded from
-- indexing; everything else is referenced by an absolute path under cwd.
if file_type == "compiled.mach-o.executable" then
str = string.format('\t\t%s /* %s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = %s; name = "%s"; includeInIndex = 0; path = "%s"; sourceTree = BUILT_PRODUCTS_DIR; };',
key, fn, file_type, name, fn)
else
str = string.format('\t\t%s /* %s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = %s; name = "%s"; path = "%s"; sourceTree = "<group>"; };',
key, fn, file_type, name, path.join(cwd, fn))
end
p:write(str, '\n')
end
p:write('/* End FBXFileReference section */\n\n')
end
-- Emit the PBXLegacyTarget section: one external-build-tool target per meta
-- project ("!BuildWorkspace"/"!UpdateWorkspace"). Tool path and arguments
-- come from project.MetaData. `env` is currently unused here.
local function write_legacy_targets(p, projects, env)
p:write('/* Begin PBXLegacyTarget section */\n')
for _, project in ipairs(projects) do
local decl = project.Decl
if project.IsMeta then
--[[
isa = PBXLegacyTarget;
buildArgumentsString = "";
buildConfigurationList = D7D12762170E4CF98A79B5EF /* Build configuration list for PBXLegacyTarget "!UpdateWorkspace" */;
buildPhases = (
);
buildToolPath = /Users/danielcollin/unity_ps3/ps3/Projects/JamGenerated/_workspace.xcode_/updateworkspace;
dependencies = (
);
name = "!UpdateWorkspace";
passBuildSettingsInEnvironment = 1;
productName = "!UpdateWorkspace";
--]]
p:write('\t\t', newid(decl.Name .. "Target"), ' /* ', decl.Name, ' */ = {\n')
p:write('\t\t\tisa = PBXLegacyTarget;\n')
p:write('\t\t\tbuildArgumentsString = "', project.MetaData.BuildArgs, '";\n')
p:write('\t\t\tbuildConfigurationList = ', newid(decl.Name .. 'Config'), ' /* Build configuration list for PBXLegacyTarget "',decl.Name, '" */;\n')
p:write('\t\t\tbuildPhases = (\n')
p:write('\t\t\t);\n');
p:write('\t\t\tbuildToolPath = ', project.MetaData.BuildTool, ';\n')
p:write('\t\t\tbuildWorkingDirectory = ', '..', ';\n')
p:write('\t\t\tdependencies = (\n\t\t\t);\n')
p:write('\t\t\tname = "', decl.Name, '";\n')
p:write('\t\t\tpassBuildSettingsInEnvironment = 1;\n')
p:write('\t\t\tproductName = "', decl.Name or "", '";\n')
p:write('\t\t};\n')
end
end
p:write('/* End PBXLegacyTarget section */\n')
end
-- Emit the PBXNativeTarget section for real (non-meta) targets; each gets a
-- single ShellScript build phase that shells out to tundra.
-- NOTE(review): the function name is misspelled ("targes") but kept since
-- callers use it; 'ProductName' (capital P) is likewise left as emitted --
-- confirm Xcode ignores the unknown key before changing output bytes.
local function write_native_targes(p, projects)
p:write('/* Begin PBXNativeTarget section */\n')
-- Map unit keyword -> Xcode product type identifier.
local categories = {
["Program"] = "com.apple.product-type.tool",
["StaticLibrary"] = "com.apple.product-type.library.static",
["SharedLibrary"] = "com.apple.product-type.library.dynamic",
}
for _, project in ipairs(projects) do
local decl = project.Decl
if not project.IsMeta then
p:write('\t\t', newid(decl.Name .. "Target"), ' /* ', decl.Name, ' */ = {\n')
p:write('\t\t\tisa = PBXNativeTarget;\n')
p:write('\t\t\tbuildConfigurationList = ', newid(decl.Name .. 'Config'), ' /* Build configuration list for PBXNativeTarget "',decl.Name, '" */;\n')
p:write('\t\t\tbuildPhases = (\n')
p:write('\t\t\t\t', newid(decl.Name .. "ShellScript"), ' /* ShellScript */,\n')
p:write('\t\t\t);\n');
p:write('\t\t\tbuildRules = (\n\t\t\t);\n')
p:write('\t\t\tdependencies = (\n\t\t\t);\n')
p:write('\t\t\tname = "', decl.Name, '";\n')
p:write('\t\t\tProductName = "', decl.Name, '";\n')
p:write('\t\t\tproductReference = ', newid(decl.Name .. "Program"), ' /* ', decl.Name, ' */;\n ')
p:write('\t\t\tproductType = "', categories[project.Type] or "", '";\n')
p:write('\t\t};\n')
end
end
p:write('/* End PBXNativeTarget section */\n')
end
-- Emit the pbxproj preamble up to (and including) the opening of the root
-- `objects` map; write_footer() closes it.
local function write_header(p)
  local preamble = {
    '// !$*UTF8*$!\n',
    '{\n',
    '\tarchiveVersion = 1;\n',
    '\tclasses = {\n',
    '\t};\n',
    '\tobjectVersion = 45;\n',
    '\tobjects = {\n',
    '\n',
  }
  p:write(table.concat(preamble))
end
-- Build project descriptors from the raw unit list.
-- Returns a table keyed by project name with fields:
--   Type, Decl, Sources (id -> path), RelativeFilename, Guid, IdeGenerationHints.
-- Also synthesizes an "OsxBundle" project when such a unit exists.
-- NOTE(review): `hints` and `ide_script` are accepted but not read here -- confirm.
local function get_projects(raw_nodes, env, hints, ide_script)
local projects = {}
-- Filter out stuff we don't care about.
local units = util.filter(raw_nodes, function (u)
return u.Decl.Name and project_types[u.Keyword]
end)
local dag_node_lut = {} -- lookup table of all named, top-level DAG nodes
local name_to_dags = {} -- table mapping unit name to array of dag nodes (for configs)
-- Map out all top-level DAG nodes
for _, unit in ipairs(units) do
local decl = unit.Decl
local dag_nodes = assert(decl.__DagNodes, "no dag nodes for " .. decl.Name)
for build_id, dag_node in pairs(dag_nodes) do
dag_node_lut[dag_node] = unit
local array = name_to_dags[decl.Name]
if not array then
array = {}
name_to_dags[decl.Name] = array
end
array[#array + 1] = dag_node
end
end
-- Sort units based on dependency complexity. We want to visit the leaf nodes
-- first so that any source file references are picked up as close to the
-- bottom of the dependency chain as possible.
local unit_weights = {}
for _, unit in ipairs(units) do
local decl = unit.Decl
local stack = { }
for _, dag in pairs(decl.__DagNodes) do
stack[#stack + 1] = dag
end
-- Weight = number of reachable top-level DAG nodes (iterative DFS).
local weight = 0
while #stack > 0 do
local node = table.remove(stack)
if dag_node_lut[node] then
weight = weight + 1
end
for _, dep in util.nil_ipairs(node.deps) do
stack[#stack + 1] = dep
end
end
unit_weights[unit] = weight
end
table.sort(units, function (a, b)
return unit_weights[a] < unit_weights[b]
end)
-- Keep track of what source files have already been grabbed by other projects.
local grabbed_sources = {}
for _, unit in ipairs(units) do
local decl = unit.Decl
local name = decl.Name
local sources = {}
local generated = {}
for build_id, dag_node in pairs(decl.__DagNodes) do
get_sources(dag_node, sources, generated, dag_node_lut)
end
-- Explicitly add all header files too as they are not picked up from the DAG
-- Also pick up headers from non-toplevel DAGs we're depending on
get_headers(unit, sources, dag_node_lut, name_to_dags)
-- Figure out which project should get this data.
local output_name = name
local ide_hints = unit.Decl.IdeGenerationHints
if ide_hints then
if ide_hints.OutputProject then
output_name = ide_hints.OutputProject
end
end
-- Rebuild source list with ids that are needed by the xcode project layout
-- NOTE(review): the cross-project dedup via grabbed_sources is commented
-- out, so each project currently lists its own copy of shared files.
local source_list = {}
for src, _ in pairs(sources) do
local norm_src = path.normalize(src)
-- if not grabbed_sources[norm_src] then
grabbed_sources[norm_src] = unit
source_list[newid(norm_src)] = norm_src
-- end
end
projects[name] = {
Type = unit.Keyword,
Decl = decl,
Sources = source_list,
RelativeFilename = name,
Guid = newid(name .. "ProjectId"),
IdeGenerationHints = unit.Decl.IdeGenerationHints
}
end
for _, unit in ipairs(raw_nodes) do
if unit.Keyword == "OsxBundle" then
local decl = unit.Decl
decl.Name = "OsxBundle"
local source_list = {[newid(decl.InfoPList)] = decl.InfoPList}
for _, resource in util.nil_ipairs(decl.Resources) do
if resource.Decl then
source_list[newid(resource.Decl.Source)] = resource.Decl.Source
end
end
projects["OsxBundle"] = {
Type = unit.Keyword,
Decl = decl,
Sources = source_list,
RelativeFilename = "$(OBJECTDIR)/MyApp.app",
Guid = newid("OsxBundle"),
}
end
end
return projects
end
-- Split a path on its last separator ('/' or '\'), returning (dir, file).
-- Paths without a separator yield (".", fn).
local function split(fn)
  local dir, file = fn:match("^(.*)[/\\]([^\\/]*)$")
  if dir then
    return dir, file
  end
  return ".", fn
end
-- Split `str` on the Lua pattern `pat`, returning the pieces as an array.
-- An empty piece at the very start of the string is skipped.
local function split_str(str, pat)
  local parts = {} -- NOTE: use {n = 0} in Lua-5.0
  local search = "(.-)" .. pat
  local pos = 1
  local start_idx, end_idx, piece = str:find(search, 1)
  while start_idx do
    if start_idx ~= 1 or piece ~= "" then
      parts[#parts + 1] = piece
    end
    pos = end_idx + 1
    start_idx, end_idx, piece = str:find(search, pos)
  end
  -- Trailing remainder after the last separator.
  if pos <= #str then
    parts[#parts + 1] = str:sub(pos)
  end
  return parts
end
-- Recursively emit PBXGroup entries for `children`, depth-first so nested
-- groups are written before the group (identified by `key`) that references
-- them. Within a group, sub-groups are listed before files, each batch
-- sorted by name. Node Type: 2 = root, 1 = directory group, 0 = file.
-- `path` is only used for the /* */ display comments.
local function print_children_2(p, groupname, key, children, path)
for name, c in pairs(children) do
if c.Type > 0 then
print_children_2(p, name, c.Key, c.Children, c.Type == 1 and path..'/'..name or path)
end
end
p:write('\t\t', key, ' /* ', path, ' */ = {\n')
p:write('\t\t\tisa = PBXGroup;\n')
p:write('\t\t\tchildren = (\n')
local dirs = {}
local files = {}
for name, ref in pairs(children) do
if ref.Type > 0 then
dirs[#dirs + 1] = { Key = ref.Key, Name = name }
else
files[#files + 1] = { Key = ref.Key, Name = name }
end
end
table.sort(dirs, function(a, b) return a.Name < b.Name end)
table.sort(files, function(a, b) return a.Name < b.Name end)
for i, ref in pairs(dirs) do
p:write(string.format('\t\t\t\t%s /* %s */,\n', ref.Key, path .. '/' .. ref.Name))
end
for i, ref in pairs(files) do
p:write(string.format('\t\t\t\t%s /* %s */,\n', ref.Key, path .. '/' .. ref.Name))
end
p:write('\t\t\t);\n')
p:write('\t\t\tname = "', groupname, '"; \n');
p:write('\t\t\tsourceTree = "<group>";\n');
p:write('\t\t};\n')
end
-- Collapse chains of single-child directory groups so the Xcode tree isn't a
-- ladder of one-entry folders. Returns the name the caller should use for
-- `group` when it was collapsed, or nil when the group keeps its own name.
local function prune_groups(group)
  local child_count = 0
  local first_name
  local first_child

  for name, child in pairs(group.Children) do
    first_name = name
    first_child = child
    child_count = child_count + 1
  end

  if child_count == 1 and first_child.Type > 0 then
    -- Single sub-group: splice its children into this group and propagate
    -- the deepest collapsed name upward.
    local new_name = prune_groups(first_child)
    group.Children = first_child.Children
    if not new_name then
      new_name = first_name
    end
    return new_name
  else
    -- Rebuild the child map, renaming sub-groups that collapsed below us.
    local children = {}
    for name, child in pairs(group.Children) do
      if child.Type > 0 then
        local new_name = prune_groups(child)
        if new_name then
          name = new_name
        end
      end
      children[name] = child
    end
    -- BUG FIX: this was assigned to the (never read) lowercase `children`
    -- field, so renames of collapsed sub-groups were silently dropped.
    group.Children = children
    return nil
  end
end
-- Build a group tree (Type 2 root / 1 directory / 0 file) from a flat
-- id -> path map by splitting each path on '/', then collapse single-entry
-- directory chains. `p` is unused here. Returns the root group keyed by `key`.
local function make_groups(p, files, key)
local filelist = sort_filelist(files)
local group = { Type = 2, Key = key, Children = {} }
for _, entry in pairs(filelist) do
local parent_group = group
local path, filename = split(entry.Value)
-- Walk/create one directory group per path component ('.' is skipped).
for i, part in ipairs(split_str(path, "/")) do
if part ~= '.' then
local grp = parent_group.Children[part]
if grp == nil then
grp = { Type = 1, Key=newid(util.tostring(parent_group)..part), Children={} }
parent_group.Children[part] = grp
end
parent_group = grp
end
end
parent_group.Children[filename] = { Type = 0, Key = entry.Key }
end
-- prune single-entry groups
prune_groups(group)
return group
end
-- Emit the PBXGroup section: one source tree per project, grouped under
-- solution-folder groups, all hanging off a synthetic "AllTargets.workspace"
-- root group referenced by the PBXProject.
-- NOTE(review): folder names come from IdeGenerationHints.Msvc.SolutionFolder
-- -- presumably deliberate reuse of the MSVC hint; confirm.
-- NOTE(review): assumes at least one project lands in "<root>" (true when the
-- hint-less meta projects are present) -- confirm.
local function write_groups(p, projects)
p:write('/* Begin PBXGroup section */\n')
-- Map folder names to array of projects under that folder
local folders = {}
for _, project in ipairs(projects) do
local hints = project.IdeGenerationHints
local msvc_hints = hints and hints.Msvc
local fname = msvc_hints and msvc_hints.SolutionFolder
if fname == nil then
fname = "<root>"
end
local folder = folders[fname]
if folder == nil then
folder = { Type = 2, Key = newid("Folder"..fname), Children = {} }
folders[fname] = folder
end
folder.Children[project.Decl.Name] = make_groups(p, project.Sources, project.Guid)
end
local root = folders["<root>"];
for name, folder in pairs(folders) do
if folder ~= root then
root.Children[name] = folder
end
end
local all_targets_name = "AllTargets.workspace"
local all_targets_id = newid(all_targets_name)
print_children_2(p, all_targets_name, all_targets_id, root.Children, '.');
-- write last group that links the projects names above
-- local all_targets_name = "AllTargets.workspace"
-- local all_targets_id = newid(all_targets_name)
-- p:write('\t\t', all_targets_id, ' /* ', all_targets_name, ' */ = {\n')
-- p:write('\t\t\tisa = PBXGroup;\n')
-- p:write('\t\t\tchildren = (\n')
-- for _, project in pairs(projects) do
-- p:write(string.format('\t\t\t\t%s /* %s */,\n', project.Guid, project.Decl.Name))
-- end
-- p:write('\t\t\t);\n')
-- p:write('\t\t\tname = "', all_targets_name, '"; \n');
-- p:write('\t\t\tsourceTree = "<group>";\n');
-- p:write('\t\t};\n')
p:write('/* End PBXGroup section */\n\n')
end
-- Emit the PBXProject section: the single project object referencing the
-- "AllTargets.workspace" root group, the project-level configuration list,
-- and one target entry per project.
local function write_project(p, projects)
local all_targets_name = "AllTargets.workspace"
local all_targets_id = newid(all_targets_name)
local project_id = newid("ProjectObject")
local project_config_list_id = newid("ProjectObjectConfigList")
p:write('/* Begin PBXProject section */\n')
p:write('\t\t', project_id, ' /* Project object */ = {\n')
p:write('\t\t\tisa = PBXProject;\n')
p:write('\t\t\tbuildConfigurationList = ', project_config_list_id, ' /* Build configuration list for PBXProject "', "Project Object", '" */;\n')
p:write('\t\t\tcompatibilityVersion = "Xcode 3.1";\n')
p:write('\t\t\thasScannedForEncodings = 1;\n')
p:write('\t\t\tmainGroup = ', all_targets_id, ' /* ', all_targets_name, ' */;\n')
p:write('\t\t\tprojectDirPath = "";\n')
p:write('\t\t\tprojectRoot = "";\n')
p:write('\t\t\ttargets = (\n')
for _, project in ipairs(projects) do
p:write(string.format('\t\t\t\t%s /* %s */,\n', newid(project.Decl.Name .. "Target"), project.Decl.Name))
end
p:write('\t\t\t);\n')
p:write('\t\t};\n')
p:write('/* End PBXProject section */\n')
end
-- Emit one PBXShellScriptBuildPhase per non-meta project: each phase cd's up
-- to the project root and invokes tundra (global TundraExePath) with the
-- $(CONFIG)-$(VARIANT)-$(SUBVARIANT) selected in Xcode. `env` is unused here.
local function write_shellscripts(p, projects, env)
p:write('/* Begin PBXShellScriptBuildPhase section */\n')
-- TODO: Do we really need to repeat this for all projects? seems a bit wasteful
for _, project in ipairs(projects) do
local name = project.Decl.Name
if not project.IsMeta then
p:write('\t\t', newid(name .. "ShellScript"), ' /* ShellScript */ = {\n')
p:write('\t\t\tisa = PBXShellScriptBuildPhase;\n')
p:write('\t\t\tbuildActionMask = 2147483647;\n')
p:write('\t\t\tfiles = (\n')
p:write('\t\t\t);\n')
p:write('\t\t\tinputPaths = (\n')
p:write('\t\t\t);\n')
p:write('\t\t\toutputPaths = (\n')
p:write('\t\t\t);\n')
p:write('\t\t\trunOnlyForDeploymentPostprocessing = 0;\n')
p:write('\t\t\tshellPath = /bin/sh;\n')
p:write('\t\t\tshellScript = "cd ..\\n', TundraExePath, ' $(CONFIG)-$(VARIANT)-$(SUBVARIANT)";\n')
p:write('\t\t};\n')
end
end
p:write('/* End PBXShellScriptBuildPhase section */\n')
end
-- Format a config tuple as the canonical "config-variant-subvariant" string.
local function get_full_config_name(config)
  return string.format("%s-%s-%s",
    config.Config.Name, config.Variant.Name, config.SubVariant)
end
-- Emit the XCBuildConfiguration section: one build configuration per
-- (project, config tuple) pair plus bare configurations for the PBXProject.
-- `set_env` lists environment variable names whose current values are baked
-- into every configuration.
local function write_configs(p, projects, config_tuples, env, set_env)
p:write('/* Begin XCBuildConfiguration section */\n')
-- I wonder if we really need to do it this way for all configs?
for _, project in ipairs(projects) do
for _, tuple in ipairs(config_tuples) do
local full_config_name = get_full_config_name(tuple)
-- A tuple is "macosx native" if macosx is among its supported hosts or
-- is its default host; those builds get native-arch settings.
local is_macosx_native = false
for _, host in util.nil_ipairs(tuple.Config.SupportedHosts) do
if host == "macosx" then
is_macosx_native = true
end
end
if "macosx" == tuple.Config.DefaultOnHost then
is_macosx_native = true
end
local config_id = newid(project.Decl.Name .. full_config_name)
p:write('\t\t', config_id, ' = {\n')
p:write('\t\t\tisa = XCBuildConfiguration;\n')
-- Don't add any think extra if subvariant is default
p:write('\t\t\tbuildSettings = {\n')
if is_macosx_native then
p:write('\t\t\t\tARCHS = "$(NATIVE_ARCH_ACTUAL)";\n')
end
p:write('\t\t\t\tVARIANT = "', tuple.Variant.Name, '";\n')
p:write('\t\t\t\tCONFIG = "', tuple.Config.Name, '";\n')
p:write('\t\t\t\tSUBVARIANT = "', tuple.SubVariant, '";\n')
if is_macosx_native and not project.IsMeta then
p:write('\t\t\t\tCONFIGURATION_BUILD_DIR = "', full_config_name, '";\n')
end
-- this is a little hack to get xcode to clean the whole output folder when using "FullBuild"
p:write('\t\t\t\tPRODUCT_NAME = "',project.Decl.Name , '";\n')
p:write('\t\t\t\tTARGET_NAME = "',project.Decl.Name , '";\n')
for i, var in ipairs(set_env) do
p:write('\t\t\t\t', var, ' = "', os.getenv(var), '";\n')
end
p:write('\t\t\t};\n')
p:write('\t\t\tname = "',full_config_name , '";\n')
p:write('\t\t};\n')
end
end
-- PBXProject configurations
for _, tuple in ipairs(config_tuples) do
local full_config_name = get_full_config_name(tuple)
local config_id = newid("ProjectObject" .. full_config_name)
p:write('\t\t', config_id, ' = {\n')
p:write('\t\t\tisa = XCBuildConfiguration;\n')
p:write('\t\t\tbuildSettings = {\n')
p:write('\t\t\t};\n')
p:write('\t\t\tname = "',full_config_name , '";\n')
p:write('\t\t};\n')
end
p:write('/* End XCBuildConfiguration section */\n')
end
-- Emit the XCConfigurationList section: one build-configuration list per
-- project plus one for the PBXProject object. The default configuration is
-- the first macosx debug tuple, or the first tuple overall as a fallback.
local function write_config_list(p, projects, config_tuples)
  p:write('/* Begin XCConfigurationList section */\n')

  local default_config = ""

  -- find the default config
  for _, tuple in ipairs(config_tuples) do
    -- BUG FIX: the pattern used to be '^(%macosx)%-'; the stray '%m' only
    -- worked because Lua treats an unknown class escape as the literal letter.
    local is_macosx_native = tuple.Config.Name:match('^(macosx)%-')
    if is_macosx_native and tuple.Variant.Name == "debug" then
      default_config = get_full_config_name(tuple)
      break
    end
  end

  -- if we didn't find a default config just grab the first one
  if default_config == "" then
    default_config = get_full_config_name(config_tuples[1])
  end

  for _, project in ipairs(projects) do
    local config_id = newid(project.Decl.Name .. 'Config')
    p:write('\t\t', config_id, ' /* Build config list for "', project.Decl.Name, '" */ = {\n')
    p:write('\t\t\tisa = XCConfigurationList;\n')
    p:write('\t\t\tbuildConfigurations = (\n')
    for _, tuple in ipairs(config_tuples) do
      local full_config_name = get_full_config_name(tuple)
      p:write(string.format('\t\t\t\t%s /* %s */,\n', newid(project.Decl.Name .. full_config_name), full_config_name))
    end
    p:write('\t\t\t);\n')
    p:write('\t\t\tdefaultConfigurationIsVisible = 1;\n')
    p:write('\t\t\tdefaultConfigurationName = "', default_config, '";\n')
    p:write('\t\t};\n')
  end

  -- PBXProject configuration list
  local config_id = newid("ProjectObjectConfigList")
  p:write('\t\t', config_id, ' /* Build config list for PBXProject */ = {\n')
  p:write('\t\t\tisa = XCConfigurationList;\n')
  p:write('\t\t\tbuildConfigurations = (\n')
  for _, tuple in ipairs(config_tuples) do
    local full_config_name = get_full_config_name(tuple)
    p:write(string.format('\t\t\t\t%s /* %s */,\n', newid("ProjectObject" .. full_config_name), full_config_name))
  end
  p:write('\t\t\t);\n')
  p:write('\t\t\tdefaultConfigurationIsVisible = 1;\n')
  p:write('\t\t\tdefaultConfigurationName = "', default_config, '";\n')
  p:write('\t\t};\n')

  p:write('/* End XCConfigurationList section */\n')
end
-- Close the root `objects` map and point rootObject at the PBXProject entry.
local function write_footer(p)
  p:write('\t};\n',
          '\trootObject = ', newid("ProjectObject"), ' /* Project object */;\n',
          '}\n')
end
-- Build the two meta projects: "!BuildWorkspace" (runs a tundra build) and
-- "!UpdateWorkspace" (regenerates the IDE files via --g <ide_script>). Their
-- source lists show tundra.lua and, when present, units.lua.
-- NOTE(review): generate_files below constructs its own identical copies and
-- never calls this function in this chunk -- possibly dead code; confirm.
local function make_meta_projects(ide_script)
local source_list = {
[newid("tundra.lua")] = "tundra.lua"
}
-- Probe for units.lua; io.open is used as a cheap existence check.
local units = io.open("units.lua")
if units then
source_list[newid("units.lua")] = "units.lua"
io.close(units)
end
local meta_name1 = "!BuildWorkspace"
local meta_name2 = "!UpdateWorkspace"
return {
{
Decl = { Name = meta_name1, },
Type = "LegacyTarget",
RelativeFilename = "",
Sources = source_list,
Guid = newid(meta_name1 .. 'ProjectId'),
IsMeta = true,
MetaData = { BuildArgs = "-v $(CONFIG)-$(VARIANT)-$(SUBVARIANT)", BuildTool = TundraExePath },
},
{
Decl = { Name = meta_name2, },
Type = "LegacyTarget",
RelativeFilename = "",
Sources = source_list,
Guid = newid(meta_name2 .. 'ProjectId'),
IsMeta = true,
MetaData = { BuildArgs = "--g " .. ide_script, BuildTool = TundraExePath },
}
}
end
-- Entry point for the Xcode 5 backend: writes one .xcodeproj per entry in
-- IdeGenerationHints.Xcode.Projects (or a single default project when no
-- hints are given). Signature follows nodegen's ide backend contract.
function xcode_generator:generate_files(ngen, config_tuples, raw_nodes, env, default_names, hints, ide_script)
  assert(config_tuples and #config_tuples > 0)

  hints = hints or {}
  hints = hints.Xcode or {}
  local base_dir = hints.BaseDir and (hints.BaseDir .. '/') or env:interpolate('$(OBJECTROOT)$(SEP)')
  native.mkdir(base_dir)

  local projects = get_projects(raw_nodes, env, hints, ide_script)

  -- Meta targets list tundra.lua (and units.lua when present) as sources.
  local source_list = {
    [newid("tundra.lua")] = "tundra.lua"
  }
  local units = io.open("units.lua")
  if units then
    source_list[newid("units.lua")] = "units.lua"
    io.close(units)
  end

  -- "!BuildWorkspace": runs a regular tundra build from inside Xcode.
  local meta_name = "!BuildWorkspace"
  local build_project = {
    Decl = { Name = meta_name, },
    Type = "LegacyTarget",
    RelativeFilename = "",
    Sources = source_list,
    Guid = newid(meta_name .. 'ProjectId'),
    IsMeta = true,
    MetaData = { BuildArgs = "$(CONFIG)-$(VARIANT)-$(SUBVARIANT)", BuildTool = TundraExePath },
  }

  -- "!UpdateWorkspace": regenerates the IDE files.
  local meta_name = "!UpdateWorkspace"
  local generate_project = {
    Decl = { Name = meta_name, },
    Type = "LegacyTarget",
    RelativeFilename = "",
    Sources = source_list,
    Guid = newid(meta_name .. 'ProjectId'),
    IsMeta = true,
    MetaData = { BuildArgs = "--g " .. ide_script, BuildTool = TundraExePath },
  }

  local solution_hints = hints.Projects
  if not solution_hints then
    print("No IdeGenerationHints.Xcode.Projects specified - using defaults")
    solution_hints = {
      ['tundra-generated.sln'] = { }
    }
  end

  for name, data in pairs(solution_hints) do
    local sln_projects = { build_project, generate_project }

    if data.Projects then
      for _, pname in ipairs(data.Projects) do
        local pp = projects[pname]
        if not pp then
          errorf("can't find project %s for inclusion in %s -- check your Projects data", pname, name)
        end
        sln_projects[#sln_projects + 1] = pp
      end
    else
      -- All the projects (that are not meta)
      for pname, pp in pairs(projects) do
        sln_projects[#sln_projects + 1] = pp
      end
    end

    local proj_dir = base_dir .. path.drop_suffix(name) .. ".xcodeproj/"
    native.mkdir(proj_dir)

    -- assert() so a failed open is reported instead of crashing on nil below.
    local p = assert(io.open(path.join(proj_dir, "project.pbxproj"), 'wb'))
    write_header(p)
    write_file_refs(p, sln_projects)
    write_groups(p, sln_projects)
    write_legacy_targets(p, sln_projects, env)
    write_native_targes(p, sln_projects)
    write_project(p, sln_projects)
    write_shellscripts(p, sln_projects, env)
    write_configs(p, sln_projects, config_tuples, env, hints.EnvVars or {})
    write_config_list(p, sln_projects, config_tuples)
    write_footer(p)
    -- BUG FIX: close each project file; the handle was previously leaked to
    -- the garbage collector, risking unflushed output.
    p:close()
  end
end
-- Register this generator as the active IDE backend.
nodegen.set_ide_backend(function(...)
  local generator = setmetatable({}, xcode_generator)
  generator:generate_files(...)
end)

View File

@ -0,0 +1,34 @@
module(..., package.seeall)
-- Template tundra.lua that `tundra init` can dump verbatim into a fresh
-- directory: three default configs (gcc on linux, gcc-osx on macosx,
-- msvc-vs2008/x64 on windows) and a single a.out program globbing C files.
init_tundra_lua = [====[
local CFiles = { ".c", ".h" }
Build {
Configs = {
Config {
Name = "generic-gcc",
DefaultOnHost = "linux",
Tools = { "gcc" },
},
Config {
Name = "macosx-gcc",
DefaultOnHost = "macosx",
Tools = { "gcc-osx" },
},
Config {
Name = "win64-msvc",
DefaultOnHost = "windows",
Tools = { "msvc-vs2008"; TargetPlatform = "x64" },
},
},
Units = function()
require "tundra.syntax.glob"
Program {
Name = "a.out",
Sources = { Glob { Dir = ".", Extensions = CFiles } },
}
Default "a.out"
end,
}
]====]

View File

@ -0,0 +1,914 @@
module(..., package.seeall)
local unitgen = require "tundra.unitgen"
local util = require "tundra.util"
local path = require "tundra.path"
local depgraph = require "tundra.depgraph"
local buildfile = require "tundra.buildfile"
local native = require "tundra.native"
-- IDE backend hook; set via set_ide_backend (registration not visible in
-- this chunk -- presumably defined further down; confirm).
local ide_backend = nil
-- Per-build state (units, default_env, base_envs, ...) populated while generating.
local current = nil
-- Base "class" table for node generators; subclasses chain via __index.
local _nodegen = { }
_nodegen.__index = _nodegen
-- Raise a structured syntax error; Message is printf-formatted from the args.
local function syntax_error(msg, ...)
  local formatted = string.format(msg, ...)
  error { Class = 'syntax error', Message = formatted }
end
-- Validate that `value` is a boolean; raises a syntax error otherwise.
local function validate_boolean(name, value)
  if type(value) ~= "boolean" then
    syntax_error("%s: expected boolean value, got %q", name, type(value))
  end
  return value
end
-- Validate that `value` is a string; raises a syntax error otherwise.
local function validate_string(name, value)
  if type(value) ~= "string" then
    syntax_error("%s: expected string value, got %q", name, type(value))
  end
  return value
end
-- Validate that `value` names a build pass (passes are referenced by name).
local function validate_pass(name, value)
  if type(value) ~= "string" then
    syntax_error("%s: expected pass name, got %q", name, type(value))
  end
  return value
end
-- Validate a table argument; a lone string is promoted to a one-element list.
local function validate_table(name, value)
  local kind = type(value)
  if kind == "table" then
    return value
  end
  if kind == "string" then
    return { value }
  end
  syntax_error("%s: expected table value, got %q", name, kind)
end
-- Validate a Config filter: either a pattern string or a table of patterns.
local function validate_config(name, value)
  local kind = type(value)
  if kind ~= "table" and kind ~= "string" then
    syntax_error("%s: expected config, got %q", name, type(value))
  end
  return value
end
-- Map of blueprint Type name -> script-time validation function.
-- The table-ish types share validate_table; they differ later at DAG time
-- (see decl_transformers).
local validators = {
["string"] = validate_string,
["pass"] = validate_pass,
["table"] = validate_table,
["filter_table"] = validate_table,
["source_list"] = validate_table,
["boolean"] = validate_boolean,
["config"] = validate_config,
}
-- Check self.Decl against self.Blueprint: required arguments must be present,
-- supplied arguments must pass their type validator (which may coerce the
-- value, e.g. string -> { string }), and unknown arguments are rejected.
function _nodegen:validate()
  local decl = self.Decl
  for name, detail in pairs(assert(self.Blueprint)) do
    local val = decl[name]
    -- BUG FIX: use an explicit nil check; the old `if not val` treated an
    -- explicit `false` for a boolean argument as a missing argument.
    if val == nil then
      if detail.Required then
        syntax_error("%s: missing argument: '%s'", self.Keyword, name)
      end
      -- ok, optional value
    else
      local validator = validators[detail.Type]
      decl[name] = validator(name, val)
    end
  end

  -- Reject arguments the blueprint doesn't know about.
  for name, detail in pairs(decl) do
    if not self.Blueprint[name] then
      syntax_error("%s: unsupported argument: '%s'", self.Keyword, name)
    end
  end
end
-- Hook for subclasses to adjust the unit environment before DAG creation;
-- intentionally a no-op in the base generator.
function _nodegen:customize_env(env, raw_data)
-- available for subclasses
end
-- Apply this unit's declaration to `env`: gather Propagate blocks from the
-- unit's dependencies, push decl aliases (via self.DeclToEnvMappings), then
-- apply raw Env blocks (append) and ReplaceEnv blocks (replace first, append
-- the rest). All values are filtered for the current BUILD_ID.
function _nodegen:configure_env(env, deps)
local build_id = env:get('BUILD_ID')
local propagate_blocks = {}
local decl = self.Decl
-- Collect Propagate blocks exported by direct dependencies.
for _, dep_obj in util.nil_ipairs(deps) do
local data = dep_obj.Decl.Propagate
if data then
propagate_blocks[#propagate_blocks + 1] = data
end
end
-- Append every config-matching item in `data` to env key `env_key`.
local function push_bindings(env_key, data)
if data then
for _, item in util.nil_ipairs(flatten_list(build_id, data)) do
env:append(env_key, item)
end
end
end
-- Like push_bindings, but the first item replaces the existing value.
local function replace_bindings(env_key, data)
if data then
local first = true
for _, item in util.nil_ipairs(flatten_list(build_id, data)) do
if first then
env:replace(env_key, item)
first = false
else
env:append(env_key, item)
end
end
end
end
-- Push Libs, Defines and so in into the environment of this unit.
-- These are named for convenience but are aliases for syntax niceness.
for decl_key, env_key in util.nil_pairs(self.DeclToEnvMappings) do
-- First pick settings from our own unit.
push_bindings(env_key, decl[decl_key])
for _, data in ipairs(propagate_blocks) do
push_bindings(env_key, data[decl_key])
end
end
-- Push Env blocks as is
for k, v in util.nil_pairs(decl.Env) do
push_bindings(k, v)
end
for k, v in util.nil_pairs(decl.ReplaceEnv) do
replace_bindings(k, v)
end
-- Finally apply Env/ReplaceEnv blocks propagated from dependencies.
for _, block in util.nil_ipairs(propagate_blocks) do
for k, v in util.nil_pairs(block.Env) do
push_bindings(k, v)
end
for k, v in util.nil_pairs(block.ReplaceEnv) do
replace_bindings(k, v)
end
end
end
-- Flatten a Sources declaration into `accum`:
--   * userdata and DAG nodes are kept as-is,
--   * generator objects (tables with a metatable) resolve via get_dag,
--   * nested plain tables recurse, honoring their own SourceDir,
--   * plain paths are joined against base_dir unless absolute.
-- Files with IGNORED_AUTOEXTS extensions are dropped. Returns `accum`.
local function resolve_sources(env, items, accum, base_dir)
local ignored_exts = util.make_lookup_table(env:get_list("IGNORED_AUTOEXTS", {}))
for _, item in util.nil_ipairs(items) do
local type_name = type(item)
assert(type_name ~= "function")
if type_name == "userdata" then
accum[#accum + 1] = item
elseif type_name == "table" then
if depgraph.is_node(item) then
accum[#accum + 1] = item
elseif getmetatable(item) then
accum[#accum + 1] = item:get_dag(env)
else
resolve_sources(env, item, accum, item.SourceDir or base_dir)
end
else
assert(type_name == "string")
local ext = path.get_extension(item)
if not ignored_exts[ext] then
if not base_dir or path.is_absolute(item) then
accum[#accum + 1] = item
else
local p = path.join(base_dir, item)
accum[#accum + 1] = p
end
end
end
end
return accum
end
-- Analyze source list, returning list of input files and list of dependencies.
--
-- This is so you can pass a mix of actions producing files and regular
-- filenames as inputs to the next step in the chain and the output files of
-- such nodes will be used automatically.
--
-- list - list of source files and nodes that produce source files
-- suffixes - acceptable source suffixes to pick up from nodes in source list
--
-- Returns (files, deps): plain filenames plus any DAG nodes that generate
-- intermediate files.
local function analyze_sources(env, pass, list, suffixes)
if not list then
return nil
end
list = util.flatten(list)
local deps = {}
-- Look up an implicit make function (e.g. .y -> .c) for a plain filename;
-- returns a DAG node, the table itself, or nil when no rule applies.
local function implicit_make(source_file)
local t = type(source_file)
if t == "table" then
return source_file
end
assert(t == "string")
local make = env:get_implicit_make_fn(source_file)
if make then
return make(env, pass, source_file)
else
return nil
end
end
-- Apply implicit_make to one filename, recording either its generator node
-- (with matching output files) or the filename itself.
local function transform(output, fn)
if type(fn) ~= "string" then
error(util.tostring(fn) .. " is not a string", 2)
end
local t = implicit_make(fn)
if t then
deps[#deps + 1] = t
t:insert_output_files(output, suffixes)
else
output[#output + 1] = fn
end
end
-- Seed the file list: explicit DAG nodes contribute their outputs directly.
local files = {}
for _, src in ipairs(list) do
if depgraph.is_node(src) then
deps[#deps + 1] = src
src:insert_output_files(files, suffixes)
elseif type(src) == "table" then
error("non-DAG node in source list at this point")
else
files[#files + 1] = src
end
end
-- Fixpoint loop: keep applying implicit make rules until no new dependency
-- nodes appear, so chained source generators are fully expanded.
while true do
local result = {}
local old_dep_count = #deps
for _, src in ipairs(files) do
transform(result, src)
end
files = result
if #deps == old_dep_count then
--print("scan", util.tostring(list), util.tostring(suffixes), util.tostring(result))
return result, deps
end
end
end
-- Pass-through transformer: the value was already validated at script time.
local function x_identity(self, name, info, value, env, out_deps)
return value
end
-- Transformer for "source_list" fields: filter for the current BUILD_ID,
-- resolve to concrete paths/nodes, then run implicit make rules. Extra
-- generator nodes are appended to out_deps; info.ExtensionKey names the env
-- list of accepted source extensions.
-- NOTE(review): filter_structure and resolve_pass are module globals defined
-- elsewhere in this file (not visible in this chunk).
local function x_source_list(self, name, info, value, env, out_deps)
local build_id = env:get('BUILD_ID')
local source_files
if build_id then
source_files = filter_structure(build_id, value)
else
source_files = value
end
local sources = resolve_sources(env, source_files, {}, self.Decl.SourceDir)
local source_exts = env:get_list(info.ExtensionKey)
local inputs, ideps = analyze_sources(env, resolve_pass(self.Decl.Pass), sources, source_exts)
if ideps then
util.append_table(out_deps, ideps)
end
return inputs
end
-- Transformer for "filter_table" fields: flatten the config-filtered nested
-- list for the current BUILD_ID.
local function x_filter_table(self, name, info, value, env, out_deps)
local build_id = env:get('BUILD_ID')
return flatten_list(build_id, value)
end
-- Resolve a unit name or generator object to its DAG node in the default
-- environment; errors on unknown names or unsupported value types.
local function find_named_node(name_or_dag)
  local kind = type(name_or_dag)
  if kind == "table" then
    return name_or_dag:get_dag(current.default_env)
  end
  if kind ~= "string" then
    errorf("illegal node specified: %q", tostring(name_or_dag))
  end
  local unit = current.units[name_or_dag]
  if not unit then
    errorf("unknown node specified: %q", tostring(name_or_dag))
  end
  return unit:get_dag(current.default_env)
end
-- Special resolver for dependencies in a nested (config-filtered) list.
-- Maps each entry to a unit generator object: strings are looked up in
-- current.units; tables with a metatable and a Decl pass through unchanged.
-- Returns {} for a nil list; errors on unknown names or bad value types.
local function resolve_dependencies(decl, raw_deps, env)
if not raw_deps then
return {}
end
local build_id = env:get('BUILD_ID')
local deps = flatten_list(build_id, raw_deps)
return util.map_in_place(deps, function (i)
if type(i) == "string" then
local n = current.units[i]
if not n then
errorf("%s: Unknown 'Depends' target %q", decl.Name, i)
end
return n
elseif type(i) == "table" and getmetatable(i) and i.Decl then
return i
else
errorf("bad 'Depends' value of type %q", type(i))
end
end)
end
-- Transformer for "pass" fields: map a pass name to the pass object
-- (resolve_pass is a module global defined elsewhere in this file).
local function x_pass(self, name, info, value, env, out_deps)
return resolve_pass(value)
end
-- Map of blueprint Type name -> DAG-time transformer applied by
-- create_input_data below.
local decl_transformers = {
-- the x_identity data types have already been checked at script time through validate_xxx
["string"] = x_identity,
["table"] = x_identity,
["config"] = x_identity,
["boolean"] = x_identity,
["pass"] = x_pass,
["source_list"] = x_source_list,
["filter_table"] = x_filter_table,
}
-- Create input data for the generator's DAG creation function based on the
-- blueprint passed in when the generator was registered. This is done here
-- centrally rather than in all the different node generators to reduce code
-- duplication and keep the generators miminal. If you need to do something
-- special, you can override create_input_data() in your subclass.
function _nodegen:create_input_data(env)
  -- Transform this unit's declaration values through the blueprint's
  -- per-type transformers, collecting any extra dependencies the
  -- transformers discover (e.g. generated-source nodes) into 'deps'.
  local decl = self.Decl
  local data = {}
  local deps = {}
  for name, detail in pairs(assert(self.Blueprint)) do
    local val = decl[name]
    -- NOTE(review): 'if val then' also skips values that are present but
    -- false, so a boolean-typed field explicitly set to false is dropped
    -- here -- confirm that is intended.
    if val then
      local xform = decl_transformers[detail.Type]
      data[name] = xform(self, name, detail, val, env, deps)
    end
  end
  return data, deps
end
-- NOTE(review): this function only ever returns nil (the branch for a
-- non-nil 'name' falls off the end and returns nothing) -- it looks
-- vestigial; resolve_pass() is what the rest of this file uses. Confirm
-- before removing.
function get_pass(self, name)
  if not name then
    return nil
  end
end
-- Cache of Config filter strings -> compiled, anchored Lua patterns.
local pattern_cache = {}

-- Compile a Config filter such as "unix-*-*" into an anchored Lua
-- pattern matching a full build id; '*' expands to a [%w_]+ component.
local function get_cached_pattern(p)
  local v = pattern_cache[p]
  if not v then
    local comp = '[%w_]+'
    local sub_pattern = p:gsub('*', '[%%w_]+')
    local platform, tool, variant, subvariant = unitgen.match_build_id(sub_pattern, comp)
    v = string.format('^%s%%-%s%%-%s%%-%s$', platform, tool, variant, subvariant)
    pattern_cache[p] = v
  end
  return v
end
-- Check whether a unit's Config attribute (nil, a pattern string, or a
-- list of either) matches the given build id. nil matches everything.
local function config_matches(pattern, build_id)
  local ptype = type(pattern)
  if ptype == "nil" then
    return true
  end
  if ptype == "string" then
    return build_id:match(get_cached_pattern(pattern))
  end
  if ptype == "table" then
    for _, sub_pattern in ipairs(pattern) do
      if config_matches(sub_pattern, build_id) then
        return true
      end
    end
    return false
  end
  error("bad 'Config' pattern type: " .. ptype)
end
-- Clone an environment for a single unit.
local function make_unit_env(unit)
  -- Select an environment for this unit based on its SubConfig tag
  -- to support cross compilation.
  local env
  local subconfig = unit.Decl.SubConfig or current.default_subconfig
  if subconfig and current.base_envs then
    env = current.base_envs[subconfig]
    if Options.VeryVerbose then
      if env then
        printf("%s: using subconfig %s (%s)", unit.Decl.Name, subconfig, env:get('BUILD_ID'))
      else
        -- Missing subconfig env: fatal only when a default subconfig is
        -- configured; otherwise we fall back to the default env below.
        if current.default_subconfig then
          errorf("%s: couldn't find a subconfig env", unit.Decl.Name)
        else
          printf("%s: no subconfig %s found; using default env", unit.Decl.Name, subconfig)
        end
      end
    end
  end
  if not env then
    env = current.default_env
  end
  -- Always hand back a clone so per-unit mutations don't leak.
  return env:clone()
end
-- Counter used to label dummy nodes for config-filtered units.
local anon_count = 1

-- Return (creating and caching on first use) the DAG node for this unit
-- under parent_env's BUILD_ID. Units filtered out via their Config
-- attribute get a no-op dummy node instead of a real build node.
function _nodegen:get_dag(parent_env)
  local build_id = parent_env:get('BUILD_ID')
  local dag = self.DagCache[build_id]
  if not dag then
    if build_id:len() > 0 and not config_matches(self.Decl.Config, build_id) then
      -- Unit has been filtered out via Config attribute.
      -- Create a fresh dummy node for it.
      local name
      if not self.Decl.Name then
        name = string.format("Dummy node %d", anon_count)
      else
        name = string.format("Dummy node %d for %s", anon_count, self.Decl.Name)
      end
      anon_count = anon_count + 1
      dag = depgraph.make_node {
        Env = parent_env,
        Pass = resolve_pass(self.Decl.Pass),
        Label = name,
      }
    else
      local unit_env = make_unit_env(self)
      if self.Decl.Name then
        unit_env:set('UNIT_PREFIX', '__' .. self.Decl.Name)
      end
      -- Before accessing the unit's dependencies, resolve them via filtering.
      local deps = resolve_dependencies(self.Decl, self.Decl.Depends, unit_env)
      self:configure_env(unit_env, deps)
      self:customize_env(unit_env, self.Decl, deps)
      local input_data, input_deps = self:create_input_data(unit_env, parent_env)
      -- Copy over dependencies which have been pre-resolved
      input_data.Depends = deps
      -- Dependencies build against the parent env, not this unit's env.
      for _, dep in util.nil_ipairs(deps) do
        input_deps[#input_deps + 1] = dep:get_dag(parent_env)
      end
      dag = self:create_dag(unit_env, input_data, input_deps, parent_env)
      if not dag then
        error("create_dag didn't generate a result node")
      end
    end
    self.DagCache[build_id] = dag
  end
  return dag
end
-- Generator state shared while a DAG generation pass runs; also hosts
-- the registry of evaluators, keyed by their build-script keyword.
local _generator = { Evaluators = {} }
_generator.__index = _generator

-- Create a fresh generator state table (optionally seeded with 's').
local function new_generator(s)
  local state = s or {}
  state.units = {}
  return setmetatable(state, _generator)
end
-- Populate state.units with a name -> declaration-object mapping,
-- rejecting duplicate unit names. Unnamed units are skipped.
local function create_unit_map(state, raw_nodes)
  for _, unit in ipairs(raw_nodes) do
    assert(unit.Decl)
    local unit_name = unit.Decl.Name
    if type(unit_name) == "string" then
      if state.units[unit_name] ~= nil then
        errorf("duplicate unit name: %s", unit_name)
      end
      state.units[unit_name] = unit
    end
  end
end
-- Core DAG generation: build generator state from the parsed build
-- script, resolve the always/default/named node sets, and return them
-- as a single table (unwrapped by generate_dag below).
function _generate_dag(args)
  local envs = assert(args.Envs)
  local raw_nodes = assert(args.Declarations)
  local state = new_generator {
    base_envs = envs,
    root_env = envs["__default"], -- the outmost config's env in a cross-compilation scenario
    config = assert(args.Config),
    variant = assert(args.Variant),
    passes = assert(args.Passes),
  }
  current = state
  create_unit_map(state, raw_nodes)
  local subconfigs = state.config.SubConfigs
  -- Pick a default environment which is used for
  -- 1. Nodes without a SubConfig declaration
  -- 2. Nodes with a missing SubConfig declaration
  -- 3. All nodes if there are no SubConfigs set for the current config
  if subconfigs then
    state.default_subconfig = assert(state.config.DefaultSubConfig)
    state.default_env = assert(envs[state.default_subconfig], "unknown DefaultSubConfig specified")
  else
    state.default_env = assert(envs["__default"])
  end
  -- (Removed two lookup tables previously built from AlwaysNodes and
  -- DefaultNodes here: they were never read.)
  local always_nodes = util.map(args.AlwaysNodes, find_named_node)
  local default_nodes = util.map(args.DefaultNodes, find_named_node)
  local named_nodes = {}
  for name, _ in pairs(state.units) do
    named_nodes[name] = find_named_node(name)
  end
  current = nil
  return { always_nodes, default_nodes, named_nodes }
end
-- Public entry point: run _generate_dag under an error handler that
-- pretty-prints build-script syntax errors; on success, unpack the
-- (always, default, named) node triple.
function generate_dag(args)
  local success, result = xpcall(function () return _generate_dag(args) end, buildfile.syntax_error_catcher)
  if success then
    return result[1], result[2], result[3]
  else
    croak("%s", result)
  end
end
-- Map a pass name to the pass object registered in the current
-- generation state. A nil name resolves to nil (default pass); an
-- unknown name is a build-script syntax error.
function resolve_pass(name)
  assert(current)
  if not name then
    return nil
  end
  local pass = current.passes[name]
  if not pass then
    syntax_error("%q is not a valid pass name", name)
  end
  return pass
end
-- Compute the output path for a unit: an explicit Target wins; otherwise
-- derive "$(OBJECTDIR)/<prefix><Name><suffix>" from the unit name.
function get_target(data, suffix, prefix)
  local explicit = data.Target
  if explicit then
    return explicit
  end
  assert(data.Name)
  return "$(OBJECTDIR)/" .. (prefix or "") .. data.Name .. (suffix or "")
end
-- Look up the evaluator metatable registered for a keyword (nil if none).
function get_evaluator(name)
  local evaluators = _generator.Evaluators
  return evaluators[name]
end
-- True when 'name' is a registered evaluator keyword.
function is_evaluator(name)
  return _generator.Evaluators[name] ~= nil
end
-- Blueprint entries merged into every evaluator's blueprint by
-- add_evaluator(); Help/Type describe each declaration key available to
-- all unit kinds.
local common_blueprint = {
  Propagate = {
    Help = "Declarations to propagate to dependent units",
    Type = "filter_table",
  },
  Depends = {
    Help = "Dependencies for this node",
    Type = "table", -- handled specially
  },
  Env = {
    Help = "Data to append to the environment for the unit",
    Type = "filter_table",
  },
  ReplaceEnv = {
    Help = "Data to replace in the environment for the unit",
    Type = "filter_table",
  },
  Pass = {
    Help = "Specify build pass",
    Type = "pass",
  },
  SourceDir = {
    Help = "Specify base directory for source files",
    Type = "string",
  },
  Config = {
    Help = "Specify configuration this unit will build in",
    Type = "config",
  },
  SubConfig = {
    Help = "Specify sub-configuration this unit will build in",
    Type = "config",
  },
  __DagNodes = {
    Help = "Internal node to keep track of DAG nodes generated so far",
    Type = "table",
  }
}
-- Derive a new evaluator "class" table from 'base' (default _nodegen),
-- wiring it for single-inheritance metatable lookup.
function create_eval_subclass(meta_tbl, base)
  setmetatable(meta_tbl, base or _nodegen)
  meta_tbl.__index = meta_tbl
  return meta_tbl
end
-- Register an evaluator (node generator) under keyword 'name': set up
-- the metatable chain, merge in common blueprint entries, expand
-- DeclToEnvMappings shortcuts, and validate all blueprint declarations.
function add_evaluator(name, meta_tbl, blueprint)
  assert(type(name) == "string")
  assert(type(meta_tbl) == "table")
  assert(type(blueprint) == "table")
  -- Set up this metatable as a subclass of _nodegen unless it is already
  -- configured.
  if not getmetatable(meta_tbl) then
    setmetatable(meta_tbl, _nodegen)
    meta_tbl.__index = meta_tbl
  end
  -- Install common blueprint items. (FIX: loop variable renamed from
  -- 'name', which shadowed the evaluator-name parameter used below.)
  for key, val in pairs(common_blueprint) do
    if not blueprint[key] then
      blueprint[key] = val
    end
  end
  -- Expand environment shortcuts into options.
  for decl_key, env_key in util.nil_pairs(meta_tbl.DeclToEnvMappings) do
    blueprint[decl_key] = {
      Type = "filter_table",
      Help = "Shortcut for environment key " .. env_key,
    }
  end
  -- Validate each blueprint entry against the supported type validators.
  for key, val in pairs(blueprint) do
    local type_ = assert(val.Type)
    if not validators[type_] then
      errorf("unsupported blueprint type %q", type_)
    end
    if val.Type == "source_list" and not val.ExtensionKey then
      errorf("%s: source_list must provide ExtensionKey", key)
    end
  end
  -- Record blueprint for use when validating user constructs.
  meta_tbl.Keyword = name
  meta_tbl.Blueprint = blueprint
  -- Store this evaluator under the keyword that will trigger it.
  _generator.Evaluators[name] = meta_tbl
end
-- Called when processing build scripts, keywords is something previously
-- registered as an evaluator here.
-- Instantiate a unit object for a build-script keyword (e.g. Program).
function evaluate(eval_keyword, data)
  local meta_tbl = assert(_generator.Evaluators[eval_keyword])
  -- Give the evaluator a chance to fix up the data before we validate it.
  data = meta_tbl:preprocess_data(data)
  local object = setmetatable({
    DagCache = {}, -- maps BUILD_ID -> dag node
    Decl = data
  }, meta_tbl)
  -- Expose the dag cache to the raw input data so the IDE generator can find it later
  data.__DagNodes = object.DagCache
  object.__index = object
  -- Validate data according to Blueprint settings
  object:validate()
  return object
end
-- Given a list of strings or nested lists, flatten the structure to a single
-- list of strings while applying configuration filters. Configuration filters
-- match against the current build identifier like this:
--
-- { "a", "b", { "nixfile1", "nixfile2"; Config = "unix-*-*" }, "bar", { "debugfile"; Config = "*-*-debug" }, }
--
-- If 'exclusive' is set, then:
-- If 'build_id' is set, only values _with_ a 'Config' filter are included.
-- If 'build_id' is nil, only values _without_ a 'Config' filter are included.
function flatten_list(build_id, list, exclusive)
  if not list then return nil end
  -- When a build_id is given we actively filter; with nil we keep
  -- everything (subject to the 'exclusive' rule documented above).
  local filter_defined = build_id ~= nil
  -- Helper function to apply filtering recursively and append results to an
  -- accumulator table.
  local function iter(node, accum, filtered)
    local node_type = type(node)
    -- Plain (metatable-less) tables are nested groups; tables with a
    -- metatable are treated as leaf values and appended as-is.
    if node_type == "table" and not getmetatable(node) then
      if node.Config then filtered = true end
      if not filter_defined or config_matches(node.Config, build_id) then
        for _, item in ipairs(node) do
          iter(item, accum, filtered)
        end
      end
    elseif not exclusive or (filtered == filter_defined) then
      accum[#accum + 1] = node
    end
  end
  local results = {}
  iter(list, results, false)
  return results
end
-- Conceptually similar to flatten_list(), but retains table structure.
-- Use to keep source tables as they are passed in, to retain nested SourceDir attributes.
local empty_leaf = {} -- constant
-- Recursively filter a nested declaration structure against build_id,
-- keeping its table shape (unlike flatten_list). Tables with a metatable
-- are treated as DAG nodes and passed through untouched; filtered-out
-- subtrees collapse to the shared empty_leaf table.
-- NOTE(review): the 'exclusive' parameter is never read in this body;
-- the recursion passes the local 'filtered' flag in its position, where
-- it is again ignored -- confirm whether exclusive filtering was meant
-- to be implemented here.
function filter_structure(build_id, data, exclusive)
  if type(data) == "table" then
    if getmetatable(data) then
      return data -- it's already a DAG node; use as-is
    end
    local filtered = data.Config and true or false
    if not data.Config or config_matches(data.Config, build_id) then
      local result = {}
      for k, item in pairs(data) do
        if type(k) == "number" then
          -- Filter array elements.
          result[#result + 1] = filter_structure(build_id, item, filtered)
        elseif k ~= "Config" then
          -- Copy key-value data through.
          result[k] = item
        end
      end
      return result
    else
      return empty_leaf
    end
  else
    return data
  end
end
-- Processes an "Env" table. For each value, the corresponding variable in
-- 'env' is appended to if its "Config" filter matches 'build_id'. If
-- 'build_id' is nil, filtered values are skipped.
function append_filtered_env_vars(env, values_to_append, build_id, exclusive)
  -- Table values are config-filtered through flatten_list and appended
  -- item by item; scalar values append directly unless exclusive
  -- filtering with a build_id is in effect.
  for key, val in util.pairs(values_to_append) do
    if type(val) == "table" then
      local list = flatten_list(build_id, val, exclusive)
      for _, subvalue in ipairs(list) do
        env:append(key, subvalue)
      end
    elseif not (exclusive and build_id) then
      env:append(key, val)
    end
  end
end
-- Like append_filtered_env_vars(), but replaces existing variables instead
-- of appending to them.
function replace_filtered_env_vars(env, values_to_replace, build_id, exclusive)
  -- Table values are config-filtered and replace the variable wholesale,
  -- but only when the filtered list is non-empty (so an all-filtered
  -- list leaves the variable untouched); scalars replace directly unless
  -- exclusive filtering with a build_id is in effect.
  for key, val in util.pairs(values_to_replace) do
    if type(val) == "table" then
      local list = flatten_list(build_id, val, exclusive)
      if #list > 0 then
        env:replace(key, list)
      end
    elseif not (exclusive and build_id) then
      env:replace(key, val)
    end
  end
end
-- Entry point for IDE project generation: build a minimal generator
-- state from the declarations and hand off to the registered backend.
function generate_ide_files(config_tuples, default_names, raw_nodes, env, hints, ide_script)
  local state = new_generator { default_env = env }
  assert(state.default_env)
  create_unit_map(state, raw_nodes)
  local backend_fn = assert(ide_backend)
  backend_fn(state, config_tuples, raw_nodes, env, default_names, hints, ide_script)
end
-- Install the backend function later invoked by generate_ide_files().
function set_ide_backend(backend_fn)
  ide_backend = backend_fn
end
-- Expose the DefRule helper which is used to register builder syntax in a
-- simplified way.
-- Register a custom build rule. 'ruledef' supplies Name, Command,
-- Blueprint and a Setup function returning InputFiles/OutputFiles (plus
-- optional ImplicitInputs/Scanner). ConfigInvariant rules are interned
-- by their file lists so identical invocations share a single node.
function _G.DefRule(ruledef)
  local name = assert(ruledef.Name, "Missing Name string in DefRule")
  local setup_fn = assert(ruledef.Setup, "Missing Setup function in DefRule " .. name)
  local cmd = assert(ruledef.Command, "Missing Command string in DefRule " .. name)
  local blueprint = assert(ruledef.Blueprint, "Missing Blueprint in DefRule " .. name)
  local mt = create_eval_subclass {}
  local annot = ruledef.Annotation
  if not annot then
    annot = name .. " $(<)"
  end
  local preproc = ruledef.Preprocess
  -- Validate that a member of the Setup result is a present table.
  local function verify_table(v, tag)
    if not v then
      errorf("No %s returned from DefRule %s", tag, name)
    end
    if type(v) ~= "table" then
      errorf("%s returned from DefRule %s is not a table", tag, name)
    end
  end
  -- Shared node constructor for both the interned and plain variants.
  local function make_node(input_files, output_files, env, data, deps, scanner)
    return depgraph.make_node {
      Env = env,
      Label = annot,
      Action = cmd,
      Pass = data.Pass or resolve_pass(ruledef.Pass),
      InputFiles = input_files,
      OutputFiles = output_files,
      ImplicitInputs = ruledef.ImplicitInputs,
      Scanner = scanner,
      Dependencies = deps,
    }
  end
  if ruledef.ConfigInvariant then
    local cache = {}
    function mt:create_dag(env, data, deps)
      local setup_data = setup_fn(env, data)
      local input_files = setup_data.InputFiles
      local output_files = setup_data.OutputFiles
      verify_table(input_files, "InputFiles")
      verify_table(output_files, "OutputFiles")
      -- Build the interning key from inputs, outputs and implicit inputs.
      local mashup = { }
      for _, input in util.nil_ipairs(input_files) do
        mashup[#mashup + 1] = input
      end
      mashup[#mashup + 1] = "@@"
      for _, output in util.nil_ipairs(output_files) do
        mashup[#mashup + 1] = output
      end
      mashup[#mashup + 1] = "@@"
      for _, implicit_input in util.nil_ipairs(setup_data.ImplicitInputs) do
        mashup[#mashup + 1] = implicit_input
      end
      -- FIX: was two successive 'local key' declarations (the second
      -- shadowing the first); folded into a single declaration.
      local key = util.tostring(native.digest_guid(table.concat(mashup, ';')))
      if cache[key] then
        return cache[key]
      else
        local node = make_node(input_files, output_files, env, data, deps, setup_data.Scanner)
        cache[key] = node
        return node
      end
    end
  else
    function mt:create_dag(env, data, deps)
      local setup_data = setup_fn(env, data)
      verify_table(setup_data.InputFiles, "InputFiles")
      verify_table(setup_data.OutputFiles, "OutputFiles")
      return make_node(setup_data.InputFiles, setup_data.OutputFiles, env, data, deps, setup_data.Scanner)
    end
  end
  if preproc then
    -- Optional hook to transform raw declaration data before validation.
    function mt:preprocess_data(raw_data)
      return preproc(raw_data)
    end
  end
  add_evaluator(name, mt, blueprint)
end
-- Default (identity) preprocessing hook; evaluators may override this
-- to massage raw declaration data before validation (see DefRule).
function _nodegen:preprocess_data(data)
  return data
end

View File

@ -0,0 +1,50 @@
module(..., package.seeall)
local npath = require "tundra.native.path"

-- Re-export the native path primitives as module-level functions.
split = npath.split
normalize = npath.normalize
join = npath.join
get_filename_dir = npath.get_filename_dir
get_filename = npath.get_filename
get_extension = npath.get_extension
drop_suffix = npath.drop_suffix
get_filename_base = npath.get_filename_base
is_absolute = npath.is_absolute
-- Strip 'prefix' from the start of 'fn' if present; otherwise return
-- 'fn' unchanged. Comparison is a plain (non-pattern) match.
function remove_prefix(prefix, fn)
  if fn:sub(1, #prefix) == prefix then
    return fn:sub(#prefix + 1)
  end
  return fn
end
-- Map a source filename to its object-file path under $(OBJECTDIR).
-- ".." components become "dotdot" so the object file cannot land outside
-- the object directory, and the source extension is baked into the name
-- so same-named sources with different suffixes don't clobber each other
-- (Tundra errors on that when checking the DAG).
function make_object_filename(env, src_fn, suffix)
  local src_suffix = get_extension(src_fn):sub(2)

  -- Drop a leading $(OBJECTDIR)/ or $(OBJECTDIR)\ from generated inputs.
  local object_fn = src_fn:match("^%$%(OBJECTDIR%)[/\\](.*)$") or src_fn

  local relative_name = drop_suffix(object_fn:gsub("%.%.", "dotdot"))
  return "$(OBJECTDIR)/$(UNIT_PREFIX)/" .. relative_name .. "__" .. src_suffix .. suffix
end

View File

@ -0,0 +1,7 @@
module(..., package.seeall)
local native = require "tundra.native"
-- Return the host platform string reported by the native layer.
function host_platform()
  local hp = native.host_platform
  return hp
end

View File

@ -0,0 +1,57 @@
module(..., package.seeall)
local util = require "tundra.util"
local native = require "tundra.native"
-- Shared metatable for interned scanner objects.
local _scanner_mt = {}
setmetatable(_scanner_mt, { __index = _scanner_mt })
-- Interning caches mapping configuration keys -> scanner objects.
local cpp_scanner_cache = {}
local generic_scanner_cache = {}
-- Return a (cached) C preprocessor scanner for the given include paths.
-- Scanners are interned by their path list so identical configurations
-- share one scanner object.
function make_cpp_scanner(paths)
  local key = table.concat(paths, '\0')
  local scanner = cpp_scanner_cache[key]
  if not scanner then
    -- BUGFIX: the cache is keyed by strings, so '#cpp_scanner_cache' was
    -- always 0 and every scanner got Index = 0 (all colliding at slot 1
    -- in all_scanners()). Count the entries instead.
    local count = 0
    for _ in pairs(cpp_scanner_cache) do
      count = count + 1
    end
    scanner = setmetatable({ Kind = 'cpp', Paths = paths, Index = count }, _scanner_mt)
    cpp_scanner_cache[key] = scanner
  end
  return scanner
end
-- Intern a "generic" keyword scanner, keyed by a digest of its
-- configuration (paths, keywords and boolean options).
-- NOTE(review): unlike make_cpp_scanner, the interned object gets no
-- 'Index' field and no _scanner_mt metatable; all_scanners() below reads
-- v.Index, which is nil for these -- confirm intended behavior.
function make_generic_scanner(data)
  data.Kind = 'generic'
  local mashup = { }
  local function add_all(l)
    for _, value in util.nil_ipairs(l) do
      mashup[#mashup + 1] = value
    end
  end
  add_all(data.Paths)
  add_all(data.Keywords)
  add_all(data.KeywordsNoFollow)
  -- Separator plus option flags make the key unambiguous.
  mashup[#mashup + 1] = '!!'
  mashup[#mashup + 1] = data.RequireWhitespace and 'y' or 'n'
  mashup[#mashup + 1] = data.UseSeparators and 'y' or 'n'
  mashup[#mashup + 1] = data.BareMeansSystem and 'y' or 'n'
  local key_str = table.concat(mashup, '\001')
  local key = native.digest_guid(key_str)
  local value = generic_scanner_cache[key]
  if not value then
    value = data
    generic_scanner_cache[key] = data
  end
  return value
end
-- Collect every interned scanner into an array, positioned by each
-- scanner's Index field (Index + 1).
-- NOTE(review): generic scanners are never assigned an Index (see
-- make_generic_scanner), so 'v.Index + 1' would raise on them; cpp
-- scanners all share Index 0 in the code as reviewed -- verify.
function all_scanners()
  local scanners = {}
  for k, v in pairs(cpp_scanner_cache) do
    scanners[v.Index + 1] = v
  end
  for k, v in pairs(generic_scanner_cache) do
    scanners[v.Index + 1] = v
  end
  return scanners
end

View File

@ -0,0 +1,37 @@
module(..., package.seeall)
-- Number of failed tests so far.
local error_count = 0

-- Run a single named unit test. 'fn' receives a test context exposing
-- check_equal(); a failing check raises a table error that is caught
-- here, reported, and counted -- it never propagates to the caller.
function _G.unit_test(label, fn)
  local t_mt = {
    check_equal = function (obj, a, b)
      if a ~= b then
        error { Message = "Equality test failed: " .. tostring(a) .. " != " .. tostring(b) }
      end
    end
  }
  t_mt.__index = t_mt
  local t = setmetatable({}, t_mt)
  -- Convert error objects into printable messages, attaching a traceback
  -- for plain (non-table) errors.
  local function stack_dumper(err_obj)
    if type(err_obj) == "table" then
      return err_obj.Message
    end
    local debug = require 'debug'
    return debug.traceback(err_obj, 2)
  end
  io.stdout:write("Testing ", label, ": ")
  io.stdout:flush()
  local ok, err = xpcall(function () fn(t) end, stack_dumper)
  if not ok then
    io.stdout:write("failed\n")
    io.stdout:write(tostring(err), "\n")
    error_count = error_count + 1
  else
    -- BUGFIX: success was written to stderr while the "Testing ..."
    -- prefix and failure output go to stdout, interleaving badly when
    -- streams are redirected separately. Report on stdout consistently.
    io.stdout:write("OK\n")
  end
end
require "tundra.test.t_env"
require "tundra.test.t_path"

View File

@ -0,0 +1,40 @@
-- alias.lua -- support for named aliases in the DAG
module(..., package.seeall)
local nodegen = require "tundra.nodegen"
local depgraph = require "tundra.depgraph"
local util = require "tundra.util"
local _alias_mt = nodegen.create_eval_subclass {}

-- Alias units produce an action-less node whose dependencies are the
-- incoming deps plus the DAG nodes of the unit's Depends list.
function _alias_mt:create_dag(env, data, input_deps)
  local all_deps = util.clone_table(input_deps)
  for _, dep in util.nil_ipairs(data.Depends) do
    all_deps[#all_deps + 1] = dep:get_dag(env:get_parent())
  end
  local node = depgraph.make_node {
    Env = env,
    Label = "Named alias " .. data.Name .. " for " .. env:get('BUILD_ID'),
    Pass = data.Pass,
    Dependencies = all_deps,
  }
  -- Remember this dag node for IDE file generation purposes
  data.__DagNode = node
  return node
end
-- Alias units only require a Name; Depends/Config etc. come from the
-- common blueprint merged in by add_evaluator.
local alias_blueprint = {
  Name = {
    Required = true,
    Help = "Set alias name",
    Type = "string",
  },
}
nodegen.add_evaluator("Alias", _alias_mt, alias_blueprint)

View File

@ -0,0 +1,42 @@
module(..., package.seeall)
local nodegen = require "tundra.nodegen"
local path = require "tundra.path"
local depgraph = require "tundra.depgraph"
-- Evaluator metatable and blueprint for the Bison parser generator unit.
local _bison_mt = nodegen.create_eval_subclass {}
local bison_blueprint = {
  Source = { Required = true, Type = "string" },
  OutputFile = { Required = false, Type = "string" },
  TokenDefines = { Required = false, Type = "boolean" },
}
-- Build the node that runs bison over data.Source. Output defaults to
-- $(OBJECTDIR)/bisongen_<base>.c; when TokenDefines is set a matching
-- header is generated via --defines and added to the outputs.
function _bison_mt:create_dag(env, data, deps)
  local src = data.Source
  local out_src
  if data.OutputFile then
    out_src = "$(OBJECTDIR)$(SEP)" .. data.OutputFile
  else
    out_src = "$(OBJECTDIR)$(SEP)bisongen_" .. path.get_filename_base(src) .. ".c"
  end
  local outputs = { out_src }
  local defopt = ""
  if data.TokenDefines then
    local out_hdr = path.drop_suffix(out_src) .. ".h"
    defopt = "--defines=" .. out_hdr
    outputs[#outputs + 1] = out_hdr
  end
  return depgraph.make_node {
    Env = env,
    Pass = data.Pass,
    Label = "Bison $(@)",
    Action = "$(BISON) $(BISONOPT) " .. defopt .. " --output-file=$(@:[1]) $(<)",
    InputFiles = { src },
    OutputFiles = outputs,
    Dependencies = deps,
  }
end
nodegen.add_evaluator("Bison", _bison_mt, bison_blueprint)

View File

@ -0,0 +1,117 @@
module(..., package.seeall)
local util = require "tundra.util"
local nodegen = require "tundra.nodegen"
local depgraph = require "tundra.depgraph"
-- Base evaluator for C# assemblies; Decl shortcuts map directly onto
-- environment variables.
local _csbase_mt = nodegen.create_eval_subclass {
  DeclToEnvMappings = {
    References = "CSLIBS",
    RefPaths = "CSLIBPATH",
  },
}
-- Executable assemblies.
local _csexe_mt = nodegen.create_eval_subclass({
  Label = "CSharpExe $(@)",
  Suffix = "$(CSPROGSUFFIX)",
  Action = "$(CSCEXECOM)"
}, _csbase_mt)
-- Library (dll) assemblies.
local _cslib_mt = nodegen.create_eval_subclass({
  Label = "CSharpLib $(@)",
  Suffix = "$(CSLIBSUFFIX)",
  Action = "$(CSCLIBCOM)"
}, _csbase_mt)
-- NOTE(review): these extension lists appear unused in the code as
-- reviewed -- confirm before removing.
local csSourceExts = { ".cs" }
local csResXExts = { ".resx" }
-- Collect the output library files of all CSharpLib dependencies into
-- the CSLIBS env var (so they are referenced on the compile line) and
-- record each dependency's DAG node in 'deps'.
local function setup_refs_from_dependencies(env, dep_nodes, deps)
  local dll_exts = { env:interpolate("$(CSLIBSUFFIX)") }
  local refs = {}
  local parent_env = env:get_parent()
  for _, x in util.nil_ipairs(dep_nodes) do
    if x.Keyword == "CSharpLib" then
      -- (Removed an unused 'outputs' local that was declared here.)
      local dag = x:get_dag(parent_env)
      deps[#deps + 1] = dag
      dag:insert_output_files(refs, dll_exts)
    end
  end
  for _, r in ipairs(refs) do
    env:append("CSLIBS", r)
  end
end
-- Generate resgen nodes compiling .resx files into .resources files
-- named "<assembly>.<basename>.resources", appending each result to the
-- CSRESOURCES env var. Returns the result-file list and their DAG nodes.
-- ('generator' is unused but kept for interface compatibility; this
-- helper also appears uncalled in the code as reviewed -- verify.)
local function setup_resources(generator, env, assembly_name, resx_files, pass)
  -- BUGFIX: this file never required tundra.path, so the 'path' global
  -- used below was nil at runtime; require it locally.
  local path = require "tundra.path"
  local result_files = {}
  local deps = {}
  local i = 1
  for _, resx in util.nil_ipairs(resx_files) do
    local basename = path.get_filename_base(resx)
    local result_file = string.format("$(OBJECTDIR)/_rescompile/%s.%s.resources", assembly_name, basename)
    result_files[i] = result_file
    deps[i] = depgraph.make_node {
      Env = env,
      Pass = pass,
      Label = "resgen $(@)",
      Action = "$(CSRESGEN)",
      InputFiles = { resx },
      OutputFiles = { result_file },
    }
    env:append("CSRESOURCES", result_file)
    i = i + 1
  end
  return result_files, deps
end
function _csbase_mt:create_dag(env, data, deps)
  -- Compile sources into the assembly named by get_target(); raw
  -- Resources are appended to CSRESOURCES and also treated as inputs,
  -- and referenced CSharpLib outputs are wired in as dependencies.
  local sources = data.Sources
  local resources = data.Resources or {}
  for _, r in util.nil_ipairs(resources) do
    env:append("CSRESOURCES", r)
  end
  sources = util.merge_arrays_2(sources, resources)
  setup_refs_from_dependencies(env, data.Depends, deps)
  return depgraph.make_node {
    Env = env,
    Pass = data.Pass,
    Label = self.Label,
    Action = self.Action,
    InputFiles = sources,
    OutputFiles = { nodegen.get_target(data, self.Suffix, self.Prefix) },
    Dependencies = util.uniq(deps),
  }
end
do
  -- Shared blueprint for both C# evaluators, registered below.
  local csblueprint = {
    Name = {
      Required = true,
      Help = "Set output (base) filename",
      Type = "string",
    },
    Sources = {
      Required = true,
      Help = "List of source files",
      Type = "source_list",
      ExtensionKey = "DOTNET_SUFFIXES",
    },
    Resources = {
      Help = "List of resource files",
      Type = "source_list",
      ExtensionKey = "DOTNET_SUFFIXES_RESOURCE",
    },
    Target = {
      Help = "Override target location",
      Type = "string",
    },
  }
  nodegen.add_evaluator("CSharpExe", _csexe_mt, csblueprint)
  nodegen.add_evaluator("CSharpLib", _cslib_mt, csblueprint)
end

View File

@ -0,0 +1,65 @@
module(..., package.seeall)
local util = require "tundra.util"
local path = require "tundra.path"
local glob = require "tundra.syntax.glob"
local nodegen = require "tundra.nodegen"
local depgraph = require "tundra.depgraph"
local lua_exts = { ".lua" }
local luac_mt_ = nodegen.create_eval_subclass {}

-- Create a node that byte-compiles one Lua source file with $(LUAC);
-- returns the target path and the node.
local function luac(env, src, pass)
  local target = "$(OBJECTDIR)/" .. path.drop_suffix(src) .. ".luac"
  local node = depgraph.make_node {
    Env = env,
    Pass = pass,
    Label = "LuaC $(@)",
    Action = "$(LUAC) -o $(@) -- $(<)",
    InputFiles = { src },
    OutputFiles = { target },
    ImplicitInputs = { "$(LUAC)" },
  }
  return target, node
end
function luac_mt_:create_dag(env, data, deps)
  -- Glob each directory in data.Dirs for .lua files and build one node
  -- that runs $(GEN_LUA_DATA) over (package-name, file) pairs. Unless
  -- LUA_EMBED_ASCII is enabled, sources are precompiled via luac().
  -- NOTE(review): the 'deps' parameter is immediately shadowed by the
  -- local below, so incoming dependencies are discarded -- confirm.
  local files = {}
  local deps = {}
  local inputs = {}
  local action_fragments = {}
  for _, base_dir in ipairs(data.Dirs) do
    local lua_files = glob.Glob { Dir = base_dir, Extensions = lua_exts }
    local dir_len = base_dir:len()
    for _, filename in pairs(lua_files) do
      -- Derive the dotted package name from the path relative to base_dir.
      local rel_name = filename:sub(dir_len+2)
      local pkg_name = rel_name:gsub("[/\\]", "."):gsub("%.lua$", "")
      inputs[#inputs + 1] = filename
      if env:get("LUA_EMBED_ASCII", "no") == "no" then
        files[#files + 1], deps[#deps + 1] = luac(env, filename, data.Pass)
      else
        files[#files + 1] = filename
      end
      action_fragments[#action_fragments + 1] = pkg_name
      action_fragments[#action_fragments + 1] = files[#files]
    end
  end
  return depgraph.make_node {
    Env = env,
    Label = "EmbedLuaSources $(@)",
    Pass = data.Pass,
    Action = "$(GEN_LUA_DATA) " .. table.concat(action_fragments, " ") .. " > $(@)",
    InputFiles = inputs,
    OutputFiles = { "$(OBJECTDIR)/" .. data.OutputFile },
    Dependencies = deps,
    ImplicitInputs = { "$(GEN_LUA_DATA)" },
  }
end
-- Blueprint for EmbedLuaSources.
-- FIX: 'Required' was the string "true" (which only worked because any
-- non-nil value is truthy); use the boolean as elsewhere in the tree.
local blueprint = {
  Dirs = { Type = "table", Required = true },
  OutputFile = { Type = "string", Required = true },
}
nodegen.add_evaluator("EmbedLuaSources", luac_mt_, blueprint)

View File

@ -0,0 +1,50 @@
module(..., package.seeall)
local decl = require "tundra.decl"
local depgraph = require "tundra.depgraph"
-- Shared blueprint for the single-file copy/hardlink rules below.
local common_blueprint = {
  Source = {
    Required = true,
    Help = "Source filename",
    Type = "string",
  },
  Target = {
    Required = true,
    Help = "Target filename",
    Type = "string",
  },
}
-- Register a DefRule named 'name' that maps one Source file to one
-- Target file via 'command'; cfg_invariant marks rules whose output is
-- identical across configurations.
local function def_copy_rule(name, command, cfg_invariant)
  local function setup(env, data)
    return {
      InputFiles = { data.Source },
      OutputFiles = { data.Target },
    }
  end
  DefRule {
    Name = name,
    ConfigInvariant = cfg_invariant,
    Blueprint = common_blueprint,
    Command = command,
    Setup = setup,
  }
end

def_copy_rule('CopyFile', '$(_COPY_FILE)')
def_copy_rule('CopyFileInvariant', '$(_COPY_FILE)', true)
def_copy_rule('HardLinkFile', '$(_HARDLINK_FILE)')
def_copy_rule('HardLinkFileInvariant', '$(_HARDLINK_FILE)', true)
-- Create a DAG node that hard-links 'src' to 'dst'.
-- NOTE(review): this uses an 'Annotation' key where every other
-- make_node caller in this tree uses 'Label' -- verify depgraph accepts
-- 'Annotation', otherwise the node may end up unlabeled.
function hardlink_file(env, src, dst, pass, deps)
  return depgraph.make_node {
    Env = env,
    Annotation = "HardLink $(<)",
    Action = "$(_HARDLINK_FILE)",
    InputFiles = { src },
    OutputFiles = { dst },
    Dependencies = deps,
    Pass = pass,
  }
end

View File

@ -0,0 +1,28 @@
-- flex.lua - Support for FLEX
module(..., package.seeall)
local path = require "tundra.path"
-- Flex rule: generates a C scanner plus its header from a .l file.
DefRule {
  Name = "Flex",
  Command = "flex --outfile=$(@:[1]) --header-file=$(@:[2]) $(<)",
  ConfigInvariant = true,

  Blueprint = {
    Source = { Required = true, Type = "string" },
    OutputCFile = { Required = false, Type = "string" },
    OutputHeaderFile = { Required = false, Type = "string" },
  },

  Setup = function (env, data)
    local src = data.Source
    local base_name = path.drop_suffix(src)
    local gen_c = data.OutputCFile or (base_name .. '.c')
    -- BUGFIX: was 'data.OutputHeaderFileFile' (typo), which silently
    -- ignored a user-supplied OutputHeaderFile.
    local gen_h = data.OutputHeaderFile or (base_name .. '.h')
    return {
      InputFiles = { src },
      OutputFiles = { gen_c, gen_h },
    }
  end,
}

View File

@ -0,0 +1,122 @@
-- glob.lua - Glob syntax elements for declarative tundra.lua usage
module(..., package.seeall)
local util = require "tundra.util"
local path = require "tundra.path"
local decl = require "tundra.decl"
local dirwalk = require "tundra.dirwalk"
-- Version-control metadata directories never descended into.
local ignored_dirs = util.make_lookup_table { ".git", ".svn", "CVS" }

-- Walk 'directory' (recursing unless disabled, skipping VCS dirs) and
-- return every file accepted by filter_fn.
local function glob(directory, recursive, filter_fn)
  local result = {}
  local function dir_filter(dir_name)
    if not recursive or ignored_dirs[dir_name] then
      return false
    end
    return true
  end
  -- FIX: loop variable renamed from 'path', which shadowed the
  -- tundra.path module required at the top of this file.
  for _, file_path in ipairs(dirwalk.walk(directory, dir_filter)) do
    if filter_fn(file_path) then
      result[#result + 1] = file_path
    end
  end
  return result
end
-- Glob syntax - Search for source files matching extension list
--
-- Synopsis:
--   Glob {
--     Dir = "...",
--     Extensions = { ".ext", ... },
--     [Recursive = false,]
--   }
--
-- Options:
--   Dir = "directory" (required) - base directory to search in
--   Extensions = { ".ext1", ".ext2" } (required) - file extensions to include
--   Recursive = boolean (optional, default: true) - recurse into subdirectories
function Glob(args)
  local recursive = args.Recursive
  if recursive == nil then
    recursive = true
  end
  if not args.Extensions then
    croak("no 'Extensions' specified in Glob (Dir is '%s')", args.Dir)
  end
  local ext_lookup = util.make_lookup_table(assert(args.Extensions))
  return glob(args.Dir, recursive, function (fn)
    return ext_lookup[path.get_extension(fn)]
  end)
end
-- FGlob syntax - Search for source files matching extension list with
-- configuration filtering
--
-- Usage:
--   FGlob {
--     Dir = "...",
--     Extensions = { ".ext", .... },
--     Filters = {
--       { Pattern = "/[Ww]in32/", Config = "win32-*-*" },
--       { Pattern = "/[Dd]ebug/", Config = "*-*-debug" },
--       ...
--     },
--     [Recursive = false],
--   }
local function FGlob(args)
  -- Use the regular glob to fetch the file list.
  local files = Glob(args)
  local pats = {}
  local result = {}
  -- Construct a mapping from { Pattern = ..., Config = ... }
  -- to { Pattern = { Config = ... } } with new arrays per config that can be
  -- embedded in the source result.
  for _, fitem in ipairs(args.Filters) do
    if not fitem.Config then
      croak("no 'Config' specified in FGlob (Pattern is '%s')", fitem.Pattern)
    end
    local tab = { Config = assert(fitem.Config) }
    pats[assert(fitem.Pattern)] = tab
    result[#result + 1] = tab
  end
  -- Traverse all files and see if they match any configuration filters. If
  -- they do, stick them in matching list. Otherwise, just keep them in the
  -- main list. This has the effect of returning an array such as this:
  -- {
  --   { "foo.c"; Config = "abc-*-*" },
  --   { "bar.c"; Config = "*-*-def" },
  --   "baz.c", "qux.m"
  -- }
  for _, f in ipairs(files) do
    local filtered = false
    -- First matching pattern wins; a file joins at most one config list.
    for filter, list in pairs(pats) do
      if f:match(filter) then
        filtered = true
        list[#list + 1] = f
        break
      end
    end
    if not filtered then
      result[#result + 1] = f
    end
  end
  return result
end
decl.add_function("Glob", Glob)
decl.add_function("FGlob", FGlob)

View File

@ -0,0 +1,40 @@
-- install.lua - Express file copying in unit form.
module(..., package.seeall)
local nodegen = require "tundra.nodegen"
local files = require "tundra.syntax.files"
local path = require "tundra.path"
local util = require "tundra.util"
local depgraph = require "tundra.depgraph"
local _mt = nodegen.create_eval_subclass {}

-- Install units copy each Source file into TargetDir.
local blueprint = {
  Sources = { Type = "source_list", Required = true },
  TargetDir = { Type = "string", Required = true },
}
-- Build the DAG for an Install unit: one copy node per source file into
-- TargetDir, gathered under a single action-less group node.
function _mt:create_dag(env, data, deps)
  local my_pass = data.Pass
  local sources = data.Sources
  local target_dir = data.TargetDir
  local copies = {}
  -- all the copy operations will depend on all the incoming deps
  for _, src in util.nil_ipairs(sources) do
    local base_fn = select(2, path.split(src))
    local target = target_dir .. '/' .. base_fn
    -- NOTE(review): files.copy_file is not visible in tundra.syntax.files
    -- as reviewed (only hardlink_file and the CopyFile DefRules) --
    -- verify this helper exists.
    copies[#copies + 1] = files.copy_file(env, src, target, my_pass, deps)
  end
  return depgraph.make_node {
    Env = env,
    -- BUGFIX: the label referenced 'decl.Name', but 'decl' is unbound in
    -- this file (the module is never required here) and Install's
    -- blueprint has no Name field, so every Install unit crashed while
    -- building its label. Use the target directory instead.
    Label = "Install group for " .. target_dir,
    Pass = my_pass,
    Dependencies = copies
  }
end

nodegen.add_evaluator("Install", _mt, blueprint)

View File

@ -0,0 +1,25 @@
-- ispc.lua - Support for Intel SPMD Program Compiler
module(..., package.seeall)
local path = require "tundra.path"
-- ISPC rule: compiles a .ispc source into an object file plus the
-- generated "<base>_ispc.h" interface header.
DefRule {
  Name = "ISPC",
  Command = "$(ISPCCOM)",

  Blueprint = {
    Source = { Required = true, Type = "string" },
  },

  Setup = function (env, data)
    local src = data.Source
    local base = path.drop_suffix(src)
    local ext = path.get_extension(src):sub(2)
    local obj_file = "$(OBJECTDIR)$(SEP)" .. base .. "__" .. ext .. "$(OBJECTSUFFIX)"
    local hdr_file = "$(OBJECTDIR)$(SEP)" .. base .. "_ispc.h"
    return {
      InputFiles = { src },
      OutputFiles = { obj_file, hdr_file },
    }
  end,
}

View File

@ -0,0 +1,27 @@
-- lemon.lua - Support for the Lemon parser generator
module(..., package.seeall)
local path = require "tundra.path"
-- Lemon rule: the parser generator writes .c, .h and .out files next to
-- the grammar source.
DefRule {
  Name = "Lemon",
  Command = "lemon $(<)",
  ConfigInvariant = true,

  Blueprint = {
    Source = { Required = true, Type = "string" },
  },

  Setup = function (env, data)
    local src = data.Source
    local stem = path.drop_suffix(src)
    return {
      InputFiles = { src },
      OutputFiles = { stem .. '.c', stem .. '.h', stem .. '.out' },
    }
  end,
}

View File

@ -0,0 +1,312 @@
-- native.lua -- support for programs, static libraries and such
module(..., package.seeall)
local util = require "tundra.util"
local nodegen = require "tundra.nodegen"
local path = require "tundra.path"
local depgraph = require "tundra.depgraph"
-- Shared evaluator base for all native (C/C++/ObjC) unit types.
-- DeclToEnvMappings routes these declaration keys straight into the
-- named environment variables when a unit is evaluated.
local _native_mt = nodegen.create_eval_subclass {
DeclToEnvMappings = {
Libs = "LIBS",
Defines = "CPPDEFS",
Includes = "CPPPATH",
Frameworks = "FRAMEWORKS",
LibPaths = "LIBPATH",
},
}
-- Single object file compiled from one source file.
local _object_mt = nodegen.create_eval_subclass({
Suffix = "$(OBJECTSUFFIX)",
Prefix = "",
Action = "$(OBJCOM)",
Label = "Object $(<)",
OverwriteOutputs = true,
}, _native_mt)
-- Linked executable; outputs are precious (kept on failure) and linking
-- is marked expensive for scheduling purposes.
local _program_mt = nodegen.create_eval_subclass({
Suffix = "$(PROGSUFFIX)",
Prefix = "$(PROGPREFIX)",
Action = "$(PROGCOM)",
Label = "Program $(@)",
PreciousOutputs = true,
OverwriteOutputs = true,
Expensive = true,
}, _native_mt)
local _staticlib_mt = nodegen.create_eval_subclass({
Suffix = "$(LIBSUFFIX)",
Prefix = "$(LIBPREFIX)",
Action = "$(LIBCOM)",
Label = "StaticLibrary $(@)",
-- Play it safe and delete the output files of this node before re-running it.
-- Solves iterative issues with e.g. AR
-- NOTE(review): the comment above says outputs are deleted first, yet
-- OverwriteOutputs is false here while true elsewhere -- confirm the
-- flag's polarity against the node executor before changing.
OverwriteOutputs = false,
IsStaticLib = true,
}, _native_mt)
-- Pure grouping node: no link action, just a bag of object files.
local _objgroup_mt = nodegen.create_eval_subclass({
Label = "ObjGroup $(<)",
}, _native_mt)
local _shlib_mt = nodegen.create_eval_subclass({
Suffix = "$(SHLIBSUFFIX)",
Prefix = "$(SHLIBPREFIX)",
Action = "$(SHLIBCOM)",
Label = "SharedLibrary $(@)",
PreciousOutputs = true,
OverwriteOutputs = true,
Expensive = true,
}, _native_mt)
-- Prebuilt external library: produces nothing, only propagates settings.
local _extlib_mt = nodegen.create_eval_subclass({
Suffix = "",
Prefix = "",
Label = "",
}, _native_mt)
-- Extensions treated as C++ when picking the PCH compile command.
local cpp_exts = util.make_lookup_table { ".cpp", ".cc", ".cxx", ".C" }
local _is_native_mt = util.make_lookup_table { _object_mt, _program_mt, _staticlib_mt, _shlib_mt, _extlib_mt, _objgroup_mt }
-- Per-unit environment customization run before DAG generation.
-- Wires up PDB file naming (when GENERATE_PDB is on), precompiled header
-- support (when the toolset advertises _PCH_SUPPORTED), and module
-- definition (.def) usage when MODDEF is set.
function _native_mt:customize_env(env, raw_data)
if env:get('GENERATE_PDB', '0') ~= '0' then
-- Figure out the final linked PDB (the one next to the dll or exe)
if raw_data.Target then
local target = env:interpolate(raw_data.Target)
local link_pdb = path.drop_suffix(target) .. '.pdb'
env:set('_PDB_LINK_FILE', link_pdb)
else
env:set('_PDB_LINK_FILE', "$(OBJECTDIR)/" .. raw_data.Name .. ".pdb")
end
-- Keep the compiler's idea of the PDB file separate
env:set('_PDB_CC_FILE', "$(OBJECTDIR)/" .. raw_data.Name .. "_ccpdb.pdb")
env:set('_USE_PDB_CC', '$(_USE_PDB_CC_OPT)')
env:set('_USE_PDB_LINK', '$(_USE_PDB_LINK_OPT)')
end
local pch = raw_data.PrecompiledHeader
if pch and env:get('_PCH_SUPPORTED', '0') ~= '0' then
assert(pch.Header)
-- NOTE(review): 'croak' is not defined in this file; presumably an
-- error helper installed globally by the embedding -- confirm.
if not nodegen.resolve_pass(pch.Pass) then
croak("%s: PrecompiledHeader requires a valid Pass", raw_data.Name)
end
env:set('_PCH_FILE', "$(OBJECTDIR)/" .. raw_data.Name .. ".pch")
env:set('_USE_PCH', '$(_USE_PCH_OPT)')
env:set('_PCH_SOURCE', path.normalize(pch.Source))
env:set('_PCH_HEADER', pch.Header)
env:set('_PCH_PASS', pch.Pass)
-- Pick the C or C++ PCH compile command based on the source extension.
if cpp_exts[path.get_extension(pch.Source)] then
env:set('PCHCOMPILE', '$(PCHCOMPILE_CXX)')
else
env:set('PCHCOMPILE', '$(PCHCOMPILE_CC)')
end
-- Make sure the PCH source itself is part of the unit's source list.
local pch_source = path.remove_prefix(raw_data.SourceDir or '', pch.Source)
if not util.array_contains(raw_data.Sources, pch_source) then
raw_data.Sources[#raw_data.Sources + 1] = pch_source
end
end
if env:has_key('MODDEF') then
env:set('_USE_MODDEF', '$(_USE_MODDEF_OPT)')
end
end
-- Build the DAG node for a native unit (object, program, library, ...).
-- Walks data.Depends to pick up link inputs and extra dependency edges,
-- then emits a single depgraph node using the subclass's Action/Label.
function _native_mt:create_dag(env, data, input_deps)
-- NOTE(review): 'build_id' and 'shlibsuffix' are computed but never used
-- in this function -- candidates for removal after confirmation.
local build_id = env:get("BUILD_ID")
local my_pass = data.Pass
local sources = data.Sources
local libsuffix = { env:get("LIBSUFFIX") }
local shlibsuffix = { env:get("SHLIBLINKSUFFIX") }
local my_extra_deps = {}
-- Link with libraries in dependencies.
for _, dep in util.nil_ipairs(data.Depends) do
if dep.Keyword == "SharedLibrary" then
-- On Win32 toolsets, we need foo.lib
-- On UNIX toolsets, we need -lfoo
--
-- We only want to add this if the node actually produced any output (i.e
-- it's not completely filtered out.)
local node = dep:get_dag(env:get_parent())
if #node.outputs > 0 then
my_extra_deps[#my_extra_deps + 1] = node
local target = dep.Decl.Target or dep.Decl.Name
target = target .. "$(SHLIBLINKSUFFIX)"
env:append('LIBS', target)
end
elseif dep.Keyword == "StaticLibrary" then
local node = dep:get_dag(env:get_parent())
my_extra_deps[#my_extra_deps + 1] = node
-- Static libs are not linked into other static libs; only real link
-- steps pull the .a/.lib outputs into their input list.
if not self.IsStaticLib then
node:insert_output_files(sources, libsuffix)
end
elseif dep.Keyword == "ObjGroup" then
-- We want all .obj files
local objsuffix = { env:get("OBJECTSUFFIX") }
-- And also .res files, if we know what that is
if env:has_key("W32RESSUFFIX") then
objsuffix[#objsuffix + 1] = env:get("W32RESSUFFIX")
end
local node = dep:get_dag(env:get_parent())
my_extra_deps[#my_extra_deps + 1] = node
if not sources then sources = {} end
-- Pull every object/resource file produced by the group's children
-- into this unit's input list.
for _, dep in util.nil_ipairs(node.deps) do
my_extra_deps[#my_extra_deps + 1] = dep
dep:insert_output_files(sources, objsuffix)
end
else
--[[
A note about win32 import libraries:
It is tempting to add an implicit input dependency on the import
library of the linked-to shared library here; but this would be
suboptimal:
1. Because there is a dependency between the nodes themselves,
the import library generation will always run before this link
step is run. Therefore, the import lib will always exist and be
updated before this link step runs.
2. Because the import library is regenerated whenever the DLL is
relinked we would have to custom-sign it (using a hash of the
DLLs export list) to avoid relinking the executable all the
time when only the DLL's internals change.
3. The DLL's export list should be available in headers anyway,
which is already covered in the compilation of the object files
that actually uses those APIs.
Therefore the best way right now is to not tell Tundra about the
import lib at all and rely on header scanning to pick up API
changes.
An implicit input dependency would be needed however if someone
is doing funky things with their import library (adding
non-linker-generated code for example). These cases are so rare
that we can safely put them off.
]]--
end
end
-- Make sure sources are not specified more than once. This can happen when
-- there are recursive dependencies on object groups.
if data.Sources and #data.Sources > 0 then
data.Sources = util.uniq(data.Sources)
end
-- Toolset-declared auxiliary outputs (e.g. map files) plus PDBs.
local aux_outputs = env:get_list("AUX_FILES_" .. self.Keyword:upper(), {})
if env:get('GENERATE_PDB', '0') ~= '0' then
aux_outputs[#aux_outputs + 1] = "$(_PDB_LINK_FILE)"
aux_outputs[#aux_outputs + 1] = "$(_PDB_CC_FILE)"
end
local extra_inputs = {}
if env:has_key('MODDEF') then
extra_inputs[#extra_inputs + 1] = "$(MODDEF)"
end
-- Only units with an action (link/archive) produce a target file;
-- plain groups leave targets nil.
local targets = nil
if self.Action then
targets = { nodegen.get_target(data, self.Suffix, self.Prefix) }
end
local deps = util.merge_arrays(input_deps, my_extra_deps)
local dag = depgraph.make_node {
Env = env,
Label = self.Label,
Pass = data.Pass,
Action = self.Action,
PreAction = data.PreAction,
InputFiles = data.Sources,
InputFilesUntracked = data.UntrackedSources,
OutputFiles = targets,
AuxOutputFiles = aux_outputs,
ImplicitInputs = extra_inputs,
Dependencies = deps,
OverwriteOutputs = self.OverwriteOutputs,
PreciousOutputs = self.PreciousOutputs,
Expensive = self.Expensive,
}
-- Remember this dag node for IDE file generation purposes
data.__DagNode = dag
return dag
end
-- Declaration schema shared by Object/Program/StaticLibrary/SharedLibrary/
-- ObjGroup units.
local native_blueprint = {
Name = {
Required = true,
Help = "Set output (base) filename",
Type = "string",
},
Sources = {
Required = true,
Help = "List of source files",
Type = "source_list",
ExtensionKey = "NATIVE_SUFFIXES",
},
UntrackedSources = {
Help = "List of input files that are not tracked",
Type = "source_list",
ExtensionKey = "NATIVE_SUFFIXES",
},
Target = {
Help = "Override target location",
Type = "string",
},
PreAction = {
Help = "Optional action to run before main action.",
Type = "string",
},
PrecompiledHeader = {
Help = "Enable precompiled header (if supported)",
Type = "table",
},
IdeGenerationHints = {
Help = "Data to support control IDE file generation",
Type = "table",
},
}
-- ExternalLibrary units carry only a name; everything else is propagated
-- environment data.
local external_blueprint = {
Name = {
Required = true,
Help = "Set name of the external library",
Type = "string",
},
}
-- Monotonic counter used to give each external-library dummy node a
-- unique label.
local external_counter = 1
-- External libraries carry no build actions of their own; they only
-- propagate settings. Emit a uniquely-labelled placeholder node so that
-- dependents still get their dependency edges.
function _extlib_mt:create_dag(env, data, input_deps)
  local serial = external_counter
  external_counter = serial + 1
  return depgraph.make_node {
    Env = env,
    Label = string.format("dummy node for %s (%d)", data.Name, serial),
    Pass = data.Pass,
    Dependencies = input_deps,
  }
end
-- Register all native unit types with the node generator.
nodegen.add_evaluator("Object", _object_mt, native_blueprint)
nodegen.add_evaluator("Program", _program_mt, native_blueprint)
nodegen.add_evaluator("StaticLibrary", _staticlib_mt, native_blueprint)
nodegen.add_evaluator("SharedLibrary", _shlib_mt, native_blueprint)
nodegen.add_evaluator("ExternalLibrary", _extlib_mt, external_blueprint)
nodegen.add_evaluator("ObjGroup", _objgroup_mt, native_blueprint)

View File

@ -0,0 +1,93 @@
-- osx-bundle.lua - Support for Max OS X bundles
module(..., package.seeall)
local nodegen = require "tundra.nodegen"
local files = require "tundra.syntax.files"
local path = require "tundra.path"
local util = require "tundra.util"
local depgraph = require "tundra.depgraph"
-- Evaluator metatables for the OsxBundle and CompileNib unit types.
-- NOTE(review): unlike sibling files these are assigned without 'local',
-- so under module(..., package.seeall) they become fields of this module's
-- table -- confirm that is intentional before tightening the scope.
_osx_bundle_mt = nodegen.create_eval_subclass { }
_compile_nib_mt = nodegen.create_eval_subclass { }
-- Build the DAG for an OsxBundle unit: hardlink the plist/PkgInfo/executable
-- and the Resources/MacOS file sets into the .app directory structure, then
-- return a node depending on every copy.
function _osx_bundle_mt:create_dag(env, data, deps)
  local bundle_dir = data.Target
  local pass = data.Pass
  local contents = bundle_dir .. "/Contents"
  local copy_deps = {}
  local infoplist = data.InfoPList

  -- Info.plist is mandatory; PkgInfo and the executable are optional.
  copy_deps[#copy_deps+1] = files.hardlink_file(env, data.InfoPList, contents .. "/Info.plist", pass, deps)
  if data.PkgInfo then
    copy_deps[#copy_deps+1] = files.hardlink_file(env, data.PkgInfo, contents .. "/PkgInfo", pass, deps)
  end
  if data.Executable then
    local basename = select(2, path.split(data.Executable))
    copy_deps[#copy_deps+1] = files.hardlink_file(env, data.Executable, contents .. "/MacOS/" .. basename, pass, deps)
  end

  local dirs = {
    { Tag = "Resources", Dir = contents .. "/Resources/" },
    { Tag = "MacOSFiles", Dir = contents .. "/MacOS/" },
  }

  for _, params in ipairs(dirs) do
    local function do_copy(fn)
      local basename = select(2, path.split(fn))
      copy_deps[#copy_deps+1] = files.hardlink_file(env, fn, params.Dir .. basename, pass, deps)
    end
    local items = data[params.Tag]
    for _, dep in util.nil_ipairs(nodegen.flatten_list(env:get('BUILD_ID'), items)) do
      if type(dep) == "string" then
        do_copy(dep)
      else
        -- A node rather than a file name: depend on it and copy each of
        -- its output files into the bundle.
        -- BUGFIX: removed a stray debug 'print(node)' left in here; also
        -- renamed this local (previously 'files') so it no longer shadows
        -- the 'tundra.syntax.files' module required at the top of the file.
        local node = dep:get_dag(env)
        deps[#deps+1] = node
        local outputs = {}
        node:insert_output_files(outputs)
        for _, fn in ipairs(outputs) do
          do_copy(fn)
        end
      end
    end
  end

  return depgraph.make_node {
    Env = env,
    Pass = pass,
    Label = "OsxBundle " .. data.Target,
    Dependencies = util.merge_arrays_2(deps, copy_deps),
  }
end
-- Compile a nib/xib interface file via $(NIBCC) into $(OBJECTDIR)/Target.
function _compile_nib_mt:create_dag(env, data, deps)
  local node_spec = {
    Env = env,
    Pass = data.Pass,
    Label = "CompileNib $(@)",
    Action = "$(NIBCC)",
    Dependencies = deps,
    InputFiles = { data.Source },
    OutputFiles = { "$(OBJECTDIR)/" .. data.Target },
  }
  return depgraph.make_node(node_spec)
end
-- Register the OS X bundle unit types and their declaration schemas.
nodegen.add_evaluator("OsxBundle", _osx_bundle_mt, {
Target = { Type = "string", Required = true, Help = "Target .app directory name" },
Executable = { Type = "string", Help = "Executable to embed" },
InfoPList = { Type = "string", Required = true, Help = "Info.plist file" },
PkgInfo = { Type = "string", Help = "PkgInfo file" },
Resources = { Type = "filter_table", Help = "Files to copy to 'Resources'" },
MacOSFiles = { Type = "filter_table", Help = "Files to copy to 'MacOS'" },
})
nodegen.add_evaluator("CompileNib", _compile_nib_mt, {
Source = { Type = "string", Required = true },
Target = { Type = "string", Required = true },
})

View File

@ -0,0 +1,51 @@
module(..., package.seeall)
-- Run an external config tool (pkg-config style), parse the -I/-D/-L/-l
-- and -framework flags it prints, and hand the resulting propagation
-- table to 'constructor' (typically ExternalLibrary).
function ConfigureRaw(cmdline, name, constructor)
  local proc = assert(io.popen(cmdline))
  local output = proc:read("*all")
  proc:close()

  -- One bucket per flag letter we understand.
  local buckets = {
    I = {},  -- include paths
    D = {},  -- preprocessor defines
    L = {},  -- library search paths
    l = {},  -- libraries
  }
  for kind, value in output:gmatch("-([ILlD])([^ \n\r]+)") do
    local bucket = buckets[kind]
    bucket[#bucket + 1] = value
  end

  local frameworks = {}
  for value in output:gmatch("-framework ([^ \n\r]+)") do
    frameworks[#frameworks + 1] = value
  end

  -- We don't have access to ExternalLibrary here - user has to pass it in.
  return constructor({
    Name = name,
    Propagate = {
      Env = {
        FRAMEWORKS = frameworks,
        CPPDEFS = buckets.D,
        CPPPATH = buckets.I,
        LIBS = buckets.l,
        LIBPATH = buckets.L
      }
    }
  })
end
-- Configure an external library via pkg-config.
-- BUGFIX: previously called the undefined global 'internal_cfg', which
-- would error at runtime; the worker function in this module is
-- ConfigureRaw.
function Configure(name, ctor)
  return ConfigureRaw("pkg-config " .. name .. " --cflags --libs", name, ctor)
end
-- Configure an external library via an arbitrary pkg-config-compatible
-- tool (e.g. sdl2-config).
-- BUGFIX: previously called the undefined global 'internal_cfg'; the
-- worker function in this module is ConfigureRaw.
function ConfigureWithTool(tool, name, ctor)
  return ConfigureRaw(tool .. " --cflags --libs", name, ctor)
end

View File

@ -0,0 +1,29 @@
-- testsupport.lua: A simple UpperCaseFile unit used for Tundra's test harness
module(..., package.seeall)
local util = require 'tundra.util'
local nodegen = require 'tundra.nodegen'
local depgraph = require 'tundra.depgraph'
local mt = nodegen.create_eval_subclass {}
-- Build a node that upper-cases InputFile into OutputFile via 'tr'
-- (used only by Tundra's test harness).
function mt:create_dag(env, data, deps)
  return depgraph.make_node {
    Env = env,
    Pass = data.Pass,
    -- BUGFIX: these strings used "\$", which is an invalid escape
    -- sequence in Lua 5.2+ (and merely a no-op '$' in Lua 5.1);
    -- '$' needs no escaping in Lua string literals.
    Label = "UpperCaseFile $(@)",
    Action = "tr a-z A-Z < $(<) > $(@)",
    InputFiles = { data.InputFile },
    OutputFiles = { data.OutputFile },
    Dependencies = deps,
  }
end
-- Register the UpperCaseFile unit type.
-- CONSISTENCY FIX: Required was the string "true" (merely truthy); every
-- other blueprint in this codebase uses the boolean true. Behavior is
-- identical for truthiness checks, but the boolean matches the schema
-- convention.
nodegen.add_evaluator("UpperCaseFile", mt, {
  Name = { Type = "string", Required = true },
  InputFile = { Type = "string", Required = true },
  OutputFile = { Type = "string", Required = true },
})

View File

@ -0,0 +1,62 @@
module(..., package.seeall)
-- Tests for tundra.environment: scalar $(VAR) interpolation, clone
-- semantics, and the :u/:l/:B/:F/:D modifiers.
unit_test('scalar interpolation', function (t)
local e = require 'tundra.environment'
local e1, e2, e3
e1 = e.create(nil, { Foo="Foo", Baz="Strut" })
e2 = e1:clone({ Foo="Bar" })
e3 = e1:clone({ Baz="c++" })
t:check_equal(e1:get("Foo"), "Foo")
t:check_equal(e1:get("Baz"), "Strut")
t:check_equal(e2:get("Foo"), "Bar")
t:check_equal(e2:get("Baz"), "Strut")
-- Missing key falls back to the supplied default.
t:check_equal(e3:get("Fransos", "Ost"), "Ost")
e1:set("Foo", "Foo")
t:check_equal(e1:interpolate("$(Foo)"), "Foo")
t:check_equal(e1:interpolate("$(Foo:u)"), "FOO")
t:check_equal(e1:interpolate("$(Foo:l)"), "foo")
t:check_equal(e1:interpolate("$(Foo) $(Baz)"), "Foo Strut")
t:check_equal(e2:interpolate("$(Foo) $(Baz)"), "Bar Strut")
t:check_equal(e3:interpolate("$(Foo) $(Baz)"), "Foo c++")
-- Lookaside table supplies values for special keys like $(<).
t:check_equal(e1:interpolate("a $(<)", { ['<'] = "foo" }), "a foo")
e1:set("FILE", "foo/bar.txt")
t:check_equal(e1:interpolate("$(FILE:B)"), "foo/bar")
t:check_equal(e1:interpolate("$(FILE:F)"), "bar.txt")
t:check_equal(e1:interpolate("$(FILE:D)"), "foo")
end)
-- Tests for list-valued variables: joining, prefix/append modifiers,
-- indexing, and cache invalidation after e1:set.
unit_test('list interpolation', function (t)
local e = require 'tundra.environment'
local e1 = e.create()
e1:set("Foo", { "Foo" })
t:check_equal(e1:interpolate("$(Foo)"), "Foo")
e1:set("Foo", { "Foo", "Bar" } )
t:check_equal(e1:interpolate("$(Foo)") , "Foo Bar")
t:check_equal(e1:interpolate("$(Foo:j,)"), "Foo,Bar")
t:check_equal(e1:interpolate("$(Foo:p!)") , "!Foo !Bar")
t:check_equal(e1:interpolate("$(Foo:a!)") , "Foo! Bar!")
t:check_equal(e1:interpolate("$(Foo:p-I:j__)") , "-IFoo__-IBar")
t:check_equal(e1:interpolate("$(Foo:j\\:)"), "Foo:Bar")
t:check_equal(e1:interpolate("$(Foo:u)"), "FOO BAR")
t:check_equal(e1:interpolate("$(Foo:[2])"), "Bar")
t:check_equal(e1:interpolate("$(Foo:Aoo)"), "Foo Baroo")
t:check_equal(e1:interpolate("$(Foo:PF)"), "Foo FBar")
local lookaside = {
['@'] = 'output',
['<'] = { 'a', 'b' },
}
t:check_equal(e1:interpolate("$(Foo) $(<)=$(@)", lookaside), "Foo Bar a b=output")
-- Verify interpolation caching is cleared when keys change.
e1:set("Foo", { "Baz" })
t:check_equal(e1:interpolate("$(Foo) $(<)=$(@)", lookaside), "Baz a b=output")
end)

View File

@ -0,0 +1,81 @@
module(..., package.seeall)
local path = require "tundra.path"
local native = require "tundra.native"
-- Compare a path against its expected value after canonicalizing Windows
-- backslashes to forward slashes, so the tests pass on every host OS.
local function check_path(t, p, expected)
  local canonical = p:gsub('\\', '/')
  t:check_equal(canonical, expected)
end
-- Tests for tundra.path helpers: normalization, joining, splitting and
-- the filename/extension accessors.
unit_test('path.normalize', function (t)
check_path(t, path.normalize("foo"), "foo")
check_path(t, path.normalize("foo/bar"), "foo/bar")
check_path(t, path.normalize("foo//bar"), "foo/bar")
check_path(t, path.normalize("foo/./bar"), "foo/bar")
check_path(t, path.normalize("foo/../bar"), "bar")
-- Leading '..' components that cannot be collapsed are preserved.
check_path(t, path.normalize("../bar"), "../bar")
check_path(t, path.normalize("foo/../../bar"), "../bar")
end)
unit_test('path.join', function (t)
check_path(t, path.join("foo", "bar"), "foo/bar")
check_path(t, path.join("foo", "../bar"), "bar")
check_path(t, path.join("/foo", "bar"), "/foo/bar")
end)
unit_test('path.split', function (t)
local function check_split(p, expected_dir, expected_fn)
local dir, fn = path.split(p)
dir = dir:gsub('\\', '/')
fn = fn:gsub('\\', '/')
t:check_equal(dir, expected_dir)
t:check_equal(fn, expected_fn)
end
-- A path without a directory component splits to the "." directory.
check_split("", ".", "")
check_split("foo", ".", "foo")
check_split("foo/bar", "foo", "bar")
check_split("/foo/bar", "/foo", "bar")
check_split("x:\\foo\\bar", "x:/foo", "bar")
end)
unit_test('path.get_filename_dir', function (t)
t:check_equal(path.get_filename_dir("foo/bar"), "foo")
t:check_equal(path.get_filename_dir("foo"), "")
end)
unit_test('path.get_filename', function (t)
t:check_equal(path.get_filename("foo/bar"), "bar")
t:check_equal(path.get_filename("foo"), "foo")
end)
unit_test('path.get_extension', function (t)
t:check_equal(path.get_extension("foo"), "")
t:check_equal(path.get_extension("foo."), ".")
t:check_equal(path.get_extension("foo.c"), ".c")
t:check_equal(path.get_extension("foo/bar/.c"), ".c")
t:check_equal(path.get_extension("foo/bar/baz.cpp"), ".cpp")
end)
unit_test('path.drop_suffix', function (t)
t:check_equal(path.drop_suffix("foo.c"), "foo")
t:check_equal(path.drop_suffix("foo/bar.c"), "foo/bar")
t:check_equal(path.drop_suffix("/foo/bar.c"), "/foo/bar")
end)
unit_test('path.get_filename_base', function (t)
t:check_equal(path.get_filename_base("foo1"), "foo1")
t:check_equal(path.get_filename_base("foo2.c"), "foo2")
t:check_equal(path.get_filename_base("/path/to/foo3"), "foo3")
t:check_equal(path.get_filename_base("/path/to/foo4.c"), "foo4")
end)
unit_test('path.is_absolute', function (t)
t:check_equal(path.is_absolute("/foo") and "true" or "false", "true")
t:check_equal(path.is_absolute("foo") and "true" or "false", "false")
-- Drive-letter absolute paths are only meaningful on Windows hosts.
if native.host_platform == "windows" then
t:check_equal(path.is_absolute("x:\\foo") and "true" or "false", "true")
end
end)

View File

@ -0,0 +1,11 @@
module(..., package.seeall)
-- Toolset entry point: clang on OS X. Builds on the gcc-osx toolset and
-- swaps the compiler/linker binaries for clang.
function apply(env, options)
tundra.unitgen.load_toolset("gcc-osx", env)
env:set_many {
["CC"] = "clang",
["CXX"] = "clang++",
["LD"] = "clang",
}
end

View File

@ -0,0 +1,28 @@
module(..., package.seeall)
-- Default .NET Framework install location and version used when the
-- build script does not specify options.Version.
local frameworkDir = "c:\\Windows\\Microsoft.NET\\Framework"
local defaultFrameworkVersion = "v3.5"
-- Toolset entry point: Microsoft .NET (csc.exe) on Windows, layered on
-- the generic-dotnet toolset.
function apply(env, options)
tundra.unitgen.load_toolset("generic-dotnet", env)
local version = options and assert(options.Version) or defaultFrameworkVersion
env:set_external_env_var('FrameworkDir', frameworkDir)
env:set_external_env_var('FrameworkVersion', version)
-- Put the framework's tool directory first on PATH so csc/resgen resolve.
local binPath = frameworkDir .. "\\" .. version
env:set_external_env_var('PATH', binPath .. ";" .. env:get_external_env_var('PATH'))
-- C# support
env:set_many {
["DOTNET_SUFFIXES"] = { ".cs" },
["DOTNET_SUFFIXES_RESOURCE"] = { ".resource" },
["CSC"] = "csc.exe",
["CSPROGSUFFIX"] = ".exe",
["CSLIBSUFFIX"] = ".dll",
["CSRESGEN"] = "resgen $(<) $(@)",
["_CSC_COMMON"] = "-warn:$(CSC_WARNING_LEVEL) /nologo $(CSLIBPATH:b:p/lib\\:) $(CSRESOURCES:b:p/resource\\:) $(CSLIBS:p/reference\\::A.dll)",
["CSCLIBCOM"] = "$(CSC) $(_CSC_COMMON) $(CSCOPTS) -target:library -out:$(@:b) $(<:b)",
["CSCEXECOM"] = "$(CSC) $(_CSC_COMMON) $(CSCOPTS) -target:exe -out:$(@:b) $(<:b)",
}
end

View File

@ -0,0 +1,12 @@
module(..., package.seeall)
-- Toolset entry point: GNU 'as' assembler, layered on generic-asm.
function apply(env, options)
-- load the generic assembly toolset first
tundra.unitgen.load_toolset("generic-asm", env)
env:set_many {
["ASM"] = "as",
["ASMCOM"] = "$(ASM) -o $(@) $(ASMDEFS:p-D) $(ASMOPTS) $(<)",
["ASMINC_KEYWORDS"] = { ".include" },
}
end

View File

@ -0,0 +1,19 @@
module(..., package.seeall)
-- Toolset entry point: gcc on OS X. Adds ObjC/ObjC++ suffixes, framework
-- options and nib compilation on top of the plain gcc toolset.
function apply(env, options)
-- load the generic GCC toolset first
tundra.unitgen.load_toolset("gcc", env)
env:set_many {
["NATIVE_SUFFIXES"] = { ".c", ".cpp", ".cc", ".cxx", ".m", ".mm", ".a", ".o" },
["CXXEXTS"] = { "cpp", "cxx", "cc", "mm" },
["FRAMEWORKS"] = "",
["SHLIBPREFIX"] = "lib",
["SHLIBOPTS"] = "-shared",
["_OS_CCOPTS"] = "$(FRAMEWORKS:p-F)",
["SHLIBCOM"] = "$(LD) $(SHLIBOPTS) $(LIBPATH:p-L) $(LIBS:p-l) $(FRAMEWORKS:p-framework ) -o $(@) $(<)",
["PROGCOM"] = "$(LD) $(PROGOPTS) $(LIBPATH:p-L) $(LIBS:p-l) $(FRAMEWORKS:p-framework ) -o $(@) $(<)",
["OBJCCOM"] = "$(CCCOM)", -- objc uses same commandline
["NIBCC"] = "ibtool --output-format binary1 --compile $(@) $(<)",
}
end

View File

@ -0,0 +1,30 @@
module(..., package.seeall)
-- Toolset entry point: plain GCC on a Unix-like host, layered on the
-- generic-cpp toolset. Defines the compile/link/archive command lines.
function apply(env, options)
-- load the generic C toolset first
tundra.unitgen.load_toolset("generic-cpp", env)
env:set_many {
["NATIVE_SUFFIXES"] = { ".c", ".cpp", ".cc", ".cxx", ".a", ".o" },
["OBJECTSUFFIX"] = ".o",
["LIBPREFIX"] = "lib",
["LIBSUFFIX"] = ".a",
-- Cross-compile prefix (e.g. "arm-none-eabi-"); empty for native builds.
["_GCC_BINPREFIX"] = "",
["CC"] = "$(_GCC_BINPREFIX)gcc",
["CXX"] = "$(_GCC_BINPREFIX)g++",
["LIB"] = "$(_GCC_BINPREFIX)ar",
["LD"] = "$(_GCC_BINPREFIX)gcc",
["_OS_CCOPTS"] = "",
["_OS_CXXOPTS"] = "",
["CCCOM"] = "$(CC) $(_OS_CCOPTS) -c $(CPPDEFS:p-D) $(CPPPATH:f:p-I) $(CCOPTS) $(CCOPTS_$(CURRENT_VARIANT:u)) -o $(@) $(<)",
["CXXCOM"] = "$(CXX) $(_OS_CXXOPTS) -c $(CPPDEFS:p-D) $(CPPPATH:f:p-I) $(CXXOPTS) $(CXXOPTS_$(CURRENT_VARIANT:u)) -o $(@) $(<)",
["PROGOPTS"] = "",
["PROGCOM"] = "$(LD) $(PROGOPTS) $(LIBPATH:p-L) -o $(@) $(<) $(LIBS:p-l)",
["PROGPREFIX"] = "",
["LIBOPTS"] = "",
["LIBCOM"] = "$(LIB) -rs $(LIBOPTS) $(@) $(<)",
["SHLIBPREFIX"] = "lib",
["SHLIBOPTS"] = "-shared",
["SHLIBCOM"] = "$(LD) $(SHLIBOPTS) $(LIBPATH:p-L) -o $(@) $(<) $(LIBS:p-l)",
}
end

View File

@ -0,0 +1,78 @@
module(..., package.seeall)
local path = require "tundra.path"
local util = require "tundra.util"
local boot = require "tundra.boot"
local scanner = require "tundra.scanner"
local depgraph = require "tundra.depgraph"
local default_keywords = { "include" }
local default_bin_keywords = { "incbin" }
-- Build (and memoize on the environment, keyed on ASMINCPATH) a generic
-- include scanner for assembly sources.
--
-- env: build environment
-- fn:  source filename (unused; kept for signature parity with other
--      scanner factories)
local function get_asm_scanner(env, fn)
  -- Interpret an env variable as a boolean flag, returning 1/0 as the
  -- scanner data format expects.
  local function test_bool(name, default)
    -- BUGFIX: 'val' was assigned without 'local', leaking it into the
    -- module/global environment on every call.
    local val = env:get(name, default)
    if val == "yes" or val == "true" or val == "1" then
      return 1
    else
      return 0
    end
  end

  local function new_scanner()
    local paths = util.map(env:get_list("ASMINCPATH"), function (v) return env:interpolate(v) end)
    local data = {
      Paths = paths,
      Keywords = env:get_list("ASMINC_KEYWORDS", default_keywords),
      KeywordsNoFollow = env:get_list("ASMINC_BINARY_KEYWORDS", default_bin_keywords),
      RequireWhitespace = test_bool("ASMINC_REQUIRE_WHITESPACE", "yes"),
      UseSeparators = test_bool("ASMINC_USE_SEPARATORS", "yes"),
      BareMeansSystem = test_bool("ASMINC_BARE_MEANS_SYSTEM", "no"),
    }
    return scanner.make_generic_scanner(data)
  end

  return env:memoize("ASMINCPATH", "_asm_scanner", new_scanner)
end
-- Register implicit make functions for assembly files.
-- These functions are called to transform source files in unit lists into
-- object files. This function is registered as a setup function so it will be
-- run after user modifications to the environment, but before nodes are
-- processed. This way users can override the extension lists.
-- Setup function run per-environment: registers an implicit make function
-- for every assembly extension that turns a source file into an object
-- file node with an include scanner attached.
local function generic_asm_setup(env)
local _assemble = function(env, pass, fn)
local object_fn = path.make_object_filename(env, fn, '$(OBJECTSUFFIX)')
return depgraph.make_node {
Env = env,
Label = 'Asm $(@)',
Pass = pass,
Action = "$(ASMCOM)",
InputFiles = { fn },
OutputFiles = { object_fn },
Scanner = get_asm_scanner(env, fn),
}
end
for _, ext in ipairs(env:get_list("ASM_EXTS")) do
env:register_implicit_make_fn(ext, _assemble)
end
end
-- Toolset entry point: install the setup function and declare the empty
-- per-variant assembler option defaults.
function apply(_outer_env, options)
_outer_env:add_setup_function(generic_asm_setup)
_outer_env:set_many {
["ASM_EXTS"] = { ".s", ".asm" },
["ASMINCPATH"] = {},
["ASMDEFS"] = "",
["ASMDEFS_DEBUG"] = "",
["ASMDEFS_PRODUCTION"] = "",
["ASMDEFS_RELEASE"] = "",
["ASMOPTS"] = "",
["ASMOPTS_DEBUG"] = "",
["ASMOPTS_PRODUCTION"] = "",
["ASMOPTS_RELEASE"] = "",
}
end

View File

@ -0,0 +1,103 @@
module(..., package.seeall)
local nodegen = require "tundra.nodegen"
local boot = require "tundra.boot"
local util = require "tundra.util"
local path = require "tundra.path"
local scanner = require "tundra.scanner"
local depgraph = require "tundra.depgraph"
local scanner_cache = {}
-- Build a C preprocessor #include scanner for the environment's
-- interpolated CPPPATH entries. 'fn' is accepted for signature parity
-- with other scanner factories but is not consulted.
function get_cpp_scanner(env, fn)
  local include_paths = {}
  for i, raw in ipairs(env:get_list("CPPPATH")) do
    include_paths[i] = env:interpolate(raw)
  end
  return scanner.make_cpp_scanner(include_paths)
end
-- Register implicit make functions for C, C++ and Objective-C files.
-- These functions are called to transform source files in unit lists into
-- object files. This function is registered as a setup function so it will be
-- run after user modifications to the environment, but before nodes are
-- processed. This way users can override the extension lists.
-- Setup function run per-environment: registers implicit make functions
-- that turn C/C++/ObjC sources into object-file nodes. If the unit uses a
-- precompiled header, the PCH source gets a special compile step instead.
local function generic_cpp_setup(env)
local _anyc_compile = function(env, pass, fn, label, action)
local object_fn = path.make_object_filename(env, fn, '$(OBJECTSUFFIX)')
local output_files = { object_fn }
local pch_source = env:get('_PCH_SOURCE', '')
local implicit_inputs = nil
-- The PCH source file compiles with $(PCHCOMPILE) and additionally
-- produces the .pch file itself.
if fn == pch_source then
label = 'Precompiled header'
pass = nodegen.resolve_pass(env:get('_PCH_PASS', ''))
action = "$(PCHCOMPILE)"
output_files = { "$(_PCH_FILE)", object_fn }
elseif pch_source ~= '' and fn ~= pch_source then
-- It would be good to make all non-pch source files dependent upon the .pch node.
-- That would require that we generate the .pch node before generating these nodes.
-- As it stands presently, when .pch compilation fails, the remaining sources
-- fail to compile, but if the dependencies were correctly setup, then they wouldn't
-- even try to compile.
end
return depgraph.make_node {
Env = env,
Label = label .. ' $(<)',
Pass = pass,
Action = action,
InputFiles = { fn },
OutputFiles = output_files,
ImplicitInputs = implicit_inputs,
Scanner = get_cpp_scanner(env, fn),
}
end
-- Map each extension-list variable to its compile label and command.
local mappings = {
["CCEXTS"] = { Label="Cc", Action="$(CCCOM)" },
["CXXEXTS"] = { Label="C++", Action="$(CXXCOM)" },
["OBJCEXTS"] = { Label="ObjC", Action="$(OBJCCOM)" },
}
for key, setup in pairs(mappings) do
for _, ext in ipairs(env:get_list(key)) do
env:register_implicit_make_fn(ext, function(env, pass, fn)
return _anyc_compile(env, pass, fn, setup.Label, setup.Action)
end)
end
end
end
-- Toolset entry point: install the setup function and declare the
-- toolset-neutral defaults that concrete toolsets (gcc, msvc, ...)
-- override.
function apply(_outer_env, options)
_outer_env:add_setup_function(generic_cpp_setup)
_outer_env:set_many {
["IGNORED_AUTOEXTS"] = { ".h", ".hpp", ".hh", ".hxx", ".inl" },
["CCEXTS"] = { "c" },
["CXXEXTS"] = { "cpp", "cxx", "cc" },
["OBJCEXTS"] = { "m" },
["PROGSUFFIX"] = "$(HOSTPROGSUFFIX)",
["SHLIBSUFFIX"] = "$(HOSTSHLIBSUFFIX)",
["CPPPATH"] = "",
["CPPDEFS"] = "",
["LIBS"] = "",
["LIBPATH"] = "$(OBJECTDIR)",
["CCOPTS"] = "",
["CXXOPTS"] = "",
["CPPDEFS_DEBUG"] = "",
["CPPDEFS_PRODUCTION"] = "",
["CPPDEFS_RELEASE"] = "",
["CCOPTS_DEBUG"] = "",
["CCOPTS_PRODUCTION"] = "",
["CCOPTS_RELEASE"] = "",
["CXXOPTS_DEBUG"] = "",
["CXXOPTS_PRODUCTION"] = "",
["CXXOPTS_RELEASE"] = "",
["SHLIBLINKSUFFIX"] = "",
}
end

View File

@ -0,0 +1,17 @@
module(..., package.seeall)
-- Placeholder setup hook; intentionally empty, kept so concrete .NET
-- toolsets share the same setup-function structure as the C toolsets.
local function generic_dotnet_setup(env)
end
-- Toolset entry point: defaults shared by the msvc and mono C# toolsets.
function apply(env, options)
env:add_setup_function(generic_dotnet_setup)
env:set_many {
["CSLIBS"] = "", -- assembly references
["CSLIBPATH"] = {}, -- assembly directories
["CSCOPTS"] = "-optimize",
["CSRESOURCES"] = "",
["CSC_WARNING_LEVEL"] = "4",
}
end

View File

@ -0,0 +1,11 @@
module(..., package.seeall)
-- Toolset entry point: Intel SPMD Program Compiler (ispc) settings.
function apply(env, options)
env:set_many {
["ISPC_SUFFIXES"] = { ".ispc", },
["ISPC"] = "ispc",
["ISPCOPTS"] = "",
-- $(@:[1]) is the object file, $(@:[2]) the generated header, matching
-- the OutputFiles ordering produced by the ISPC DefRule's Setup.
["ISPCCOM"] = "$(ISPC) $(ISPCOPTS) -o $(@:[1]) -h $(@:[2]) $(<)",
}
end

View File

@ -0,0 +1,24 @@
module(..., package.seeall)
-- Toolset entry point: MinGW (gcc on Windows) with windres resource
-- compilation support.
function apply(env, options)
-- load the generic GCC toolset first
tundra.unitgen.load_toolset("gcc", env)
-- load support for win32 resource compilation
tundra.unitgen.load_toolset("win32-rc", env)
env:set_many {
["NATIVE_SUFFIXES"] = { ".c", ".cpp", ".cc", ".cxx", ".a", ".o", ".rc" },
["OBJECTSUFFIX"] = ".o",
["LIBPREFIX"] = "",
["LIBSUFFIX"] = ".a",
["W32RESSUFFIX"] = ".o",
["CPPDEFS"] = "_WIN32",
["_CPPDEFS"] = "$(CPPDEFS:p/D) $(CPPDEFS_$(CURRENT_VARIANT:u):p/D)",
["RC"] = "windres",
["RCOPTS"] = "",
["RCCOM"] = "$(RC) $(RCOPTS) --output=$(@:b) $(CPPPATH:b:p-I) --input=$(<:b)",
-- On MinGW, shared libraries are linked against their .a import libs.
["SHLIBLINKSUFFIX"] = ".a",
}
end

View File

@ -0,0 +1,17 @@
module(..., package.seeall)
-- Toolset entry point: Mono C# (gmcs/resgen2), layered on generic-dotnet.
function apply(env, options)
tundra.unitgen.load_toolset("generic-dotnet", env)
env:set_many {
["DOTNET_SUFFIXES"] = { ".cs" },
["DOTNET_SUFFIXES_RESOURCE"] = { ".resource" },
["CSC"] = "gmcs",
["CSPROGSUFFIX"] = ".exe",
["CSLIBSUFFIX"] = ".dll",
["CSRESGEN"] = "resgen2 $(<) $(@)",
["_CSC_COMMON"] = "-warn:$(CSC_WARNING_LEVEL) /nologo $(CSLIBPATH:p-lib\\:) $(CSRESOURCES:p-resource\\:) $(CSLIBS:p-reference\\::A.dll)",
["CSCLIBCOM"] = "$(CSC) $(_CSC_COMMON) $(CSCOPTS) -target:library -out:$(@) $(<)",
["CSCEXECOM"] = "$(CSC) $(_CSC_COMMON) $(CSCOPTS) -target:exe -out:$(@) $(<)",
}
end

View File

@ -0,0 +1,8 @@
module(..., package.seeall)
local vscommon = require "tundra.tools.msvc-vscommon"
-- Toolset entry point: Visual Studio 9.0 (2008), via the shared
-- msvc-vscommon setup.
function apply(env, options)
vscommon.apply_msvc_visual_studio("9.0", env, options)
end

View File

@ -0,0 +1,8 @@
module(..., package.seeall)
local vscommon = require "tundra.tools.msvc-vscommon"
-- Toolset entry point: Visual Studio 10.0 (2010), via the shared
-- msvc-vscommon setup.
function apply(env, options)
vscommon.apply_msvc_visual_studio("10.0", env, options)
end

View File

@ -0,0 +1,8 @@
module(..., package.seeall)
local vscommon = require "tundra.tools.msvc-vscommon"
-- Toolset entry point: Visual Studio 11.0 (2012), via the shared
-- msvc-vscommon setup.
function apply(env, options)
vscommon.apply_msvc_visual_studio("11.0", env, options)
end

View File

@ -0,0 +1,8 @@
module(..., package.seeall)
local vscommon = require "tundra.tools.msvc-vscommon"
-- Toolset entry point: Visual Studio 12.0 (2013), via the shared
-- msvc-vscommon setup.
function apply(env, options)
vscommon.apply_msvc_visual_studio("12.0", env, options)
end

View File

@ -0,0 +1,267 @@
-- msvc-vscommon.lua - utility code for all versions of Visual Studio
module(..., package.seeall)
local native = require "tundra.native"
local os = require "os"
-- Visual Studio tooling layout
-- Maps [host arch][target arch] -> subdirectory of VC\bin holding the
-- right (cross-)compiler binaries; "" means the default native tools.
local vc_bin_map = {
["x86"] = {
["x86"] = "",
["x64"] = "x86_amd64",
["arm"] = "x86_arm",
},
["x64"] = {
["x86"] = "",
["x64"] = "amd64",
["arm"] = "x86_arm", -- is this really legal?
},
}
-- Maps [host arch][target arch] -> subdirectory of VC\lib for the
-- target's import/static libraries.
local vc_lib_map = {
["x86"] = {
["x86"] = "",
["x64"] = "amd64",
["arm"] = "arm",
},
["x64"] = {
["x86"] = "",
["x64"] = "amd64",
["arm"] = "arm",
},
}
-- Windows SDK layout
-- Relative bin/include/lib roots inside each SDK generation's install dir.
local pre_win8_sdk_dir = {
["bin"] = "bin",
["include"] = "include",
["lib"] = "lib",
}
local win8_sdk_dir = {
["bin"] = "bin",
["include"] = "include",
["lib"] = "lib\\win8\\um",
}
local win81_sdk_dir = {
["bin"] = "bin",
["include"] = "include",
["lib"] = "lib\\winv6.3\\um",
}
-- Per-target-arch subdirectories below those roots; the Win8+ kits split
-- includes into "shared" and "um".
local pre_win8_sdk = {
["x86"] = {
["bin"] = "",
["include"] = "",
["lib"] = "",
},
["x64"] = {
["bin"] = "x64",
["include"] = "",
["lib"] = "x64",
},
}
local post_win8_sdk = {
["x86"] = {
["bin"] = "x86",
["include"] = { "shared", "um" },
["lib"] = "x86",
},
["x64"] = {
["bin"] = "x64",
["include"] = { "shared", "um" },
["lib"] = "x64",
},
["arm"] = {
["bin"] = "arm",
["include"] = { "shared", "um" },
["lib"] = "arm",
},
}
-- Each quadruplet specifies a registry key value that gets us the SDK location,
-- followed by a folder structure (for each supported target architecture)
-- and finally the corresponding bin, include and lib folder's relative location
local sdk_map = {
["9.0"] = { "SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows\\v6.0A", "InstallationFolder", pre_win8_sdk_dir, pre_win8_sdk },
["10.0"] = { "SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows\\v7.0A", "InstallationFolder", pre_win8_sdk_dir, pre_win8_sdk },
["11.0"] = { "SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots", "KitsRoot", win8_sdk_dir, post_win8_sdk },
["12.0"] = { "SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots", "KitsRoot81", win81_sdk_dir, post_win8_sdk },
}
-- Determine the host CPU architecture from the standard Windows
-- environment variables, consulting the WOW64 redirection variable so a
-- 32-bit process on a 64-bit host still reports x64.
local function get_host_arch()
  local arch = native.getenv("PROCESSOR_ARCHITECTURE")
  local wow64 = native.getenv("PROCESSOR_ARCHITEW6432", "")
  if arch == "AMD64" or wow64 == "AMD64" then
    return "x64"
  end
  if arch == "IA64" or wow64 == "IA64" then
    return "itanium"
  end
  return "x86"
end
-- Join two path fragments with a backslash.
-- A nil `path` yields `path_to_append` unchanged; a trailing backslash
-- on `path` is not doubled.
function path_combine(path, path_to_append)
  if path == nil then
    return path_to_append
  end
  local sep = path:find("\\$") and "" or "\\"
  return path .. sep .. path_to_append
end
-- Iterate `maybe_list` as an array: a table is iterated directly, any
-- other value is wrapped as a one-element array first.
function path_it(maybe_list)
  if type(maybe_list) ~= "table" then
    maybe_list = { maybe_list }
  end
  return ipairs(maybe_list)
end
-- Configure `env` to build with Visual Studio `version` ("9.0", "10.0",
-- "11.0" or "12.0"), locating the compiler via the SxS\VS7 registry key
-- and the matching Windows SDK via sdk_map.
-- options: TargetArch (default "x86"), HostArch (default autodetected),
-- SdkVersion (default: same as `version`).
-- Raises via assert/error when the VS version or SDK isn't installed.
function apply_msvc_visual_studio(version, env, options)
-- NOTE: don't make changes to `env` until you've asserted
-- that the requested version is in fact installed,
-- the `vs-wild` toolset will call this function
-- repeatedly with a the next version but the same `env`,
-- if a version fails (assert/error)
if native.host_platform ~= "windows" then
error("the msvc toolset only works on windows hosts")
end
-- Load basic MSVC environment setup first.
-- We're going to replace the paths to some tools.
tundra.unitgen.load_toolset('msvc', env)
options = options or {}
local target_arch = options.TargetArch or "x86"
local host_arch = options.HostArch or get_host_arch()
local sdk_version = options.SdkVersion or version -- we identify SDKs by VS version and fallback to current version
-- We'll find any edition of VS (including Express) here
local vs_root = native.reg_query("HKLM", "SOFTWARE\\Microsoft\\VisualStudio\\SxS\\VS7", version)
assert(vs_root, "The requested version of Visual Studio isn't installed")
-- Normalize to exactly one trailing backslash (when one is present).
vs_root = string.gsub(vs_root, "\\+$", "\\")
local vc_lib
local vc_bin
-- NOTE(review): vc_bin_map has no "itanium" host entry, so an Itanium
-- host would fail here indexing nil rather than via errorf — confirm.
vc_bin = vc_bin_map[host_arch][target_arch]
if not vc_bin then
errorf("can't build target arch %s on host arch %s", target_arch, host_arch)
end
vc_bin = vs_root .. "vc\\bin\\" .. vc_bin
vc_lib = vs_root .. "vc\\lib\\" .. vc_lib_map[host_arch][target_arch]
--
-- Now fix up the SDK
--
local sdk_root
local sdk_bin
local sdk_include = {}
local sdk_lib
local sdk = sdk_map[sdk_version]
assert(sdk, "The requested version of Visual Studio isn't supported")
-- sdk = { registry key, value name, per-tree base dirs, per-arch subdirs }
sdk_root = native.reg_query("HKLM", sdk[1], sdk[2])
assert(sdk_root, "The requested version of the SDK isn't installed")
sdk_root = string.gsub(sdk_root, "\\+$", "\\")
local sdk_dir_base = sdk[3]
local sdk_dir = sdk[4][target_arch]
assert(sdk_dir, "The target platform architecture isn't supported by the SDK")
sdk_bin = sdk_root .. sdk_dir_base["bin"] .. "\\" .. sdk_dir["bin"]
-- Win8+ SDKs list several include subdirs ("shared", "um"); collect all.
local sdk_dir_base_include = sdk_dir_base["include"]
for _, v in path_it(sdk_dir["include"]) do
sdk_include[#sdk_include + 1] = sdk_root .. sdk_dir_base_include .. "\\" .. v
end
sdk_lib = sdk_root .. sdk_dir_base["lib"] .. "\\" .. sdk_dir["lib"]
--
-- Tools
--
-- Quote the paths: they commonly contain spaces (e.g. "Program Files").
local cl_exe = '"' .. path_combine(vc_bin, "cl.exe") .. '"'
local lib_exe = '"' .. path_combine(vc_bin, "lib.exe") .. '"'
local link_exe = '"' .. path_combine(vc_bin, "link.exe") .. '"'
local rc_exe = '"' .. path_combine(sdk_bin, "rc.exe") .. '"' -- pickup the Resource Compiler from the SDK
env:set('CC', cl_exe)
env:set('CXX', cl_exe)
env:set('LIB', lib_exe)
env:set('LD', link_exe)
env:set('RC', rc_exe)
if sdk_version == "9.0" then
env:set("RCOPTS", "") -- clear the "/nologo" option (it was first added in VS2010)
end
if version == "12.0" then
-- Force MSPDBSRV.EXE
env:set("CCOPTS", "/FS")
env:set("CXXOPTS", "/FS")
end
-- Wire-up the external environment
env:set_external_env_var('VSINSTALLDIR', vs_root)
env:set_external_env_var('VCINSTALLDIR', vs_root .. "\\vc")
env:set_external_env_var('DevEnvDir', vs_root .. "Common7\\IDE")
local include = {}
for _, v in ipairs(sdk_include) do
include[#include + 1] = v
end
include[#include + 1] = vs_root .. "VC\\ATLMFC\\INCLUDE"
include[#include + 1] = vs_root .. "VC\\INCLUDE"
env:set_external_env_var("WindowsSdkDir", sdk_root)
env:set_external_env_var("INCLUDE", table.concat(include, ';'))
-- if MFC isn't installed with VS
-- the linker will throw an error when looking for libs
-- Lua does not have a "does directory exist function"
-- we could use one here
local lib_str = sdk_lib .. ";" .. vs_root .. "\\VC\\ATLMFC\\lib\\" .. vc_lib_map[host_arch][target_arch] .. ";" .. vc_lib
env:set_external_env_var("LIB", lib_str)
env:set_external_env_var("LIBPATH", lib_str)
-- Modify %PATH%
local path = {}
path[#path + 1] = sdk_root
path[#path + 1] = vs_root .. "Common7\\IDE"
-- Put the native compiler bin dir on PATH so cl.exe finds its DLLs.
if "x86" == host_arch then
path[#path + 1] = vs_root .. "\\VC\\Bin"
elseif "x64" == host_arch then
path[#path + 1] = vs_root .. "\\VC\\Bin\\amd64"
elseif "arm" == host_arch then
path[#path + 1] = vs_root .. "\\VC\\Bin\\arm"
end
path[#path + 1] = vs_root .. "\\Common7\\IDE"
path[#path + 1] = env:get_external_env_var('PATH')
env:set_external_env_var("PATH", table.concat(path, ';'))
end

View File

@ -0,0 +1,23 @@
module(..., package.seeall)
local vscommon = require "tundra.tools.msvc-vscommon"
-- Probe a list of Visual Studio versions (newest first by default) and
-- configure `env` with the first one that is actually installed.
-- Raises when no candidate version can be applied.
function apply(env, options)
  local candidates = options.VsVersions or { "12.0", "11.0", "10.0", "9.0" }
  for _, version in ipairs(candidates) do
    -- Lua 5.1 xpcall can't forward arguments, hence the closure capture.
    local ok, err = xpcall(
      function() vscommon.apply_msvc_visual_studio(version, env, options) end,
      function(e) return e end)
    if ok then
      print("Visual Studio version " .. version .. " found ")
      return
    end
    print("Visual Studio version " .. version .. " does not appear to be installed (" .. err .. ")")
  end
  error("Unable to find suitable version of Visual Studio (please install either version " .. table.concat(candidates, ", ") .. " of Visual Studio to continue)")
end

View File

@ -0,0 +1,150 @@
-- msvc-winsdk.lua - Use Microsoft Windows SDK 7.1 or later to build.
module(..., package.seeall)
local native = require "tundra.native"
local os = require "os"
-- Bail out on non-Windows hosts: this toolset reads the Windows registry.
-- NOTE(review): this check runs at require() time, unlike msvc-vscommon
-- which defers it until apply — confirm that is intentional.
if native.host_platform ~= "windows" then
error("the msvc toolset only works on windows hosts")
end
-- Detect the build host's architecture: "x64", "itanium" or "x86".
-- Checks PROCESSOR_ARCHITEW6432 as well so a 32-bit (WOW64) process
-- still reports the OS's real architecture.
local function get_host_arch()
  local arch = native.getenv("PROCESSOR_ARCHITECTURE")
  local wow_arch = native.getenv("PROCESSOR_ARCHITEW6432", "")
  if arch == "AMD64" or wow_arch == "AMD64" then
    return "x64"
  end
  if arch == "IA64" or wow_arch == "IA64" then
    return "itanium"
  end
  return "x86"
end
-- Compiler bin directory relative to the VC root, as [host_arch][target_arch].
-- An entry is either a plain string, or a table keyed by VC version with a
-- generic fallback at index [1] (setup() relies on string-indexing
-- returning nil to fall through these cases).
local compiler_dirs = {
["x86"] = {
["x86"] = "bin\\",
["x64"] = "bin\\x86_amd64\\",
["itanium"] = "bin\\x86_ia64\\",
},
["x64"] = {
["x86"] = "bin\\",
-- VS2012 dropped the native amd64 cross layout here; older versions
-- use bin\amd64.
["x64"] = {
["11.0"] = "bin\\x86_amd64\\",
"bin\\amd64\\"
},
["itanium"] = "bin\\x86_ia64\\",
},
["itanium"] = {
["x86"] = "bin\\x86_ia64\\",
["itanium"] = "bin\\ia64\\",
},
}
-- Locate a Visual C++ compiler and the matching Windows SDK through the
-- registry, and point `env` (CC/CXX/LIB/LD/RC plus INCLUDE, LIB, PATH)
-- at them.
-- options: TargetArch (default "x86"), HostArch (default autodetected),
-- VcVersion (default "10.0").
local function setup(env, options)
  options = options or {}
  local target_arch = options.TargetArch or "x86"
  local host_arch = options.HostArch or get_host_arch()
  local vcversion = options.VcVersion or "10.0"
  -- compiler_dirs entries are either a plain string or a table keyed by
  -- VC version with a generic fallback at [1]. Validate the host/target
  -- combination first: the previous code indexed a nil entry and crashed
  -- with an unhelpful message for unsupported pairs (e.g. x64 -> arm).
  local per_host = compiler_dirs[host_arch]
  local bin_spec = per_host and per_host[target_arch]
  if not bin_spec then
    errorf("can't build target arch %s on host arch %s", target_arch, host_arch)
  end
  local binDir
  if type(bin_spec) == "table" then
    binDir = bin_spec[vcversion] or bin_spec[1]
  else
    binDir = bin_spec
  end
  local sdkDir
  local sdkDirIncludes
  local sdkLibDir
  local vcLibDir
  if vcversion == "11.0" then
    -- VS2012 pairs with the Windows 8 SDK, which splits headers into
    -- UM (user mode) and SHARED, and libs into per-arch subdirs.
    local sdk_key = "SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows\\v8.0"
    sdkDir = assert(native.reg_query("HKLM", sdk_key, "InstallationFolder"))
    sdkDirIncludes = { sdkDir .. "\\INCLUDE\\UM", sdkDir .. "\\INCLUDE\\SHARED" }
    sdkLibDir = "LIB\\win8\\um\\"
    vcLibDir = "LIB"
    if "x86" == target_arch then
      sdkLibDir = sdkLibDir .. "x86"
    elseif "x64" == target_arch then
      sdkLibDir = sdkLibDir .. "x64"
      vcLibDir = "LIB\\amd64"
    elseif "arm" == target_arch then
      sdkLibDir = sdkLibDir .. "arm"
    end
  else
    -- Older SDKs: one INCLUDE directory, arch-specific LIB subdirs.
    local sdk_key = "SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows"
    sdkDir = assert(native.reg_query("HKLM", sdk_key, "CurrentInstallFolder"))
    sdkDirIncludes = { sdkDir .. "\\INCLUDE" };
    sdkLibDir = "LIB"
    vcLibDir = "LIB"
    if "x64" == target_arch then
      sdkLibDir = "LIB\\x64"
      vcLibDir = "LIB\\amd64"
    elseif "itanium" == target_arch then
      sdkLibDir = "LIB\\IA64"
      vcLibDir = "LIB\\IA64"
    end
  end
  local vc_key = "SOFTWARE\\Microsoft\\VisualStudio\\SxS\\VC7"
  local vc_dir = assert(native.reg_query("HKLM", vc_key, vcversion))
  if vc_dir:sub(-1) ~= '\\' then
    vc_dir = vc_dir .. '\\'
  end
  -- Quote tool paths; they usually live under "Program Files".
  local cl_exe = '"' .. vc_dir .. binDir .. "cl.exe" ..'"'
  local lib_exe = '"' .. vc_dir .. binDir .. "lib.exe" ..'"'
  local link_exe = '"' .. vc_dir .. binDir .. "link.exe" ..'"'
  env:set('CC', cl_exe)
  env:set('CXX', cl_exe)
  env:set('LIB', lib_exe)
  env:set('LD', link_exe)
  -- Set up the MS SDK associated with visual studio
  env:set_external_env_var("WindowsSdkDir", sdkDir)
  env:set_external_env_var("INCLUDE", table.concat(sdkDirIncludes, ";") .. ";" .. vc_dir .. "\\INCLUDE")
  -- Resource compiler lives in the SDK; the Win8 SDK adds an x86 subdir.
  -- (Removed a stray debug print of vcversion that was left in here.)
  local rc_exe
  if vcversion == "11.0" then
    rc_exe = '"' .. sdkDir .. "\\bin\\x86\\rc.exe" ..'"'
  else
    rc_exe = '"' .. sdkDir .. "\\bin\\rc.exe" ..'"'
  end
  env:set('RC', rc_exe)
  local libString = sdkDir .. "\\" .. sdkLibDir .. ";" .. vc_dir .. "\\" .. vcLibDir
  env:set_external_env_var("LIB", libString)
  env:set_external_env_var("LIBPATH", libString)
  local path = { }
  local vc_root = vc_dir:sub(1, -4) -- chop the trailing "vc\" to get the VS root
  -- Cross compilers need the native bin dir on PATH for their DLLs.
  -- The old comparison was against "\\bin\\", which no compiler_dirs
  -- value ever equals (they have no leading backslash), so the extra
  -- entry was unconditionally added even for the plain native compiler.
  if binDir ~= "bin\\" then
    path[#path + 1] = vc_dir .. "\\bin"
  end
  path[#path + 1] = vc_root .. "Common7\\Tools" -- drop vc\ at end
  path[#path + 1] = vc_root .. "Common7\\IDE" -- drop vc\ at end
  path[#path + 1] = sdkDir
  path[#path + 1] = vc_dir .. binDir
  path[#path + 1] = env:get_external_env_var('PATH')
  env:set_external_env_var("PATH", table.concat(path, ';'))
end
-- Entry point for the toolset: load the generic msvc defaults, then
-- overwrite the tool paths with registry-located ones via setup().
function apply(env, options)
  tundra.unitgen.load_toolset('msvc', env)
  setup(env, options)
end

View File

@ -0,0 +1,53 @@
-- msvc.lua - common definitions for all flavors of MSVC
module(..., package.seeall)
-- Common definitions shared by every MSVC flavor: suffixes, tool names
-- and the command-line templates used by the DAG builder. Concrete
-- toolsets (msvc-vscommon, msvc-winsdk, msvc6) override the tool paths.
function apply(env, options)
-- load the generic C toolset first
tundra.unitgen.load_toolset("generic-cpp", env)
-- load support for win32 resource compilation
tundra.unitgen.load_toolset("win32-rc", env)
env:set_many {
["NATIVE_SUFFIXES"] = { ".c", ".cpp", ".cc", ".cxx", ".lib", ".obj", ".res", ".rc" },
["OBJECTSUFFIX"] = ".obj",
["LIBPREFIX"] = "",
["LIBSUFFIX"] = ".lib",
-- Default tool names; assume they are on PATH until a toolset sets
-- absolute, quoted paths.
["CC"] = "cl",
["CXX"] = "cl",
["LIB"] = "lib",
["LD"] = "link",
["CPPDEFS"] = "_WIN32",
["_CPPDEFS"] = "$(CPPDEFS:p/D) $(CPPDEFS_$(CURRENT_VARIANT:u):p/D)",
-- Precompiled header and PDB plumbing; the _USE_* vars are switched
-- on elsewhere when a unit requests the feature.
["_PCH_SUPPORTED"] = "1",
["_USE_PCH_OPT"] = "/Fp$(_PCH_FILE:b) /Yu$(_PCH_HEADER)",
["_USE_PCH"] = "",
["_USE_PDB_CC_OPT"] = "/Zi /Fd$(_PDB_CC_FILE:b)",
["_USE_PDB_LINK_OPT"] = "/DEBUG /PDB:$(_PDB_LINK_FILE)",
["_USE_PDB_CC"] = "",
["_USE_PDB_LINK"] = "",
["_USE_MODDEF_OPT"] = "/DEF:$(MODDEF)",
["_USE_MODDEF"] = "",
["RC"] = "rc",
["RCOPTS"] = "/nologo",
["W32RESSUFFIX"] = ".res",
["RCCOM"] = "$(RC) $(RCOPTS) /fo$(@:b) $(_CPPDEFS) $(CPPPATH:b:p/i) $(<:b)",
["CCCOM"] = "$(CC) /c @RESPONSE|@|$(_CPPDEFS) $(CPPPATH:b:p/I) /nologo $(CCOPTS) $(CCOPTS_$(CURRENT_VARIANT:u)) $(_USE_PCH) $(_USE_PDB_CC) /Fo$(@:b) $(<:b)",
["CXXCOM"] = "$(CC) /c @RESPONSE|@|$(_CPPDEFS) $(CPPPATH:b:p/I) /nologo $(CXXOPTS) $(CXXOPTS_$(CURRENT_VARIANT:u)) $(_USE_PCH) $(_USE_PDB_CC) /Fo$(@:b) $(<:b)",
["PCHCOMPILE_CC"] = "$(CC) /c $(_CPPDEFS) $(CPPPATH:b:p/I) /nologo $(CCOPTS) $(CCOPTS_$(CURRENT_VARIANT:u)) $(_USE_PDB_CC) /Yc$(_PCH_HEADER) /Fp$(@:[1]:b) /Fo$(@:[2]:b) $(<:[1]:b)",
["PCHCOMPILE_CXX"] = "$(CXX) /c $(_CPPDEFS) $(CPPPATH:b:p/I) /nologo $(CXXOPTS) $(CXXOPTS_$(CURRENT_VARIANT:u)) $(_USE_PDB_CC) /Yc$(_PCH_HEADER) /Fp$(@:[1]:b) /Fo$(@:[2]:b) $(<:[1]:b)",
["LIBS"] = "",
["PROGOPTS"] = "",
["PROGCOM"] = "$(LD) /nologo @RESPONSE|@|$(_USE_PDB_LINK) $(PROGOPTS) $(LIBPATH:b:p/LIBPATH\\:) $(_USE_MODDEF) $(LIBS) /out:$(@:b) $(<:b:p\n)",
["LIBOPTS"] = "",
["LIBCOM"] = "$(LIB) /nologo @RESPONSE|@|$(LIBOPTS) /out:$(@:b) $(<:b:p\n)",
["PROGPREFIX"] = "",
["SHLIBLINKSUFFIX"] = ".lib",
["SHLIBPREFIX"] = "",
["SHLIBOPTS"] = "",
["SHLIBCOM"] = "$(LD) /DLL /nologo @RESPONSE|@|$(_USE_PDB_LINK) $(SHLIBOPTS) $(LIBPATH:b:p/LIBPATH\\:) $(_USE_MODDEF) $(LIBS) /out:$(@:b) $(<:b)",
-- Extra outputs the linker may emit next to the main artifact; the
-- build system tracks them for cleaning.
["AUX_FILES_PROGRAM"] = { "$(@:B:a.exe.manifest)", "$(@:B:a.pdb)", "$(@:B:a.exp)", "$(@:B:a.lib)", "$(@:B:a.ilk)", },
["AUX_FILES_SHAREDLIBRARY"] = { "$(@:B:a.dll.manifest)", "$(@:B:a.pdb)", "$(@:B:a.exp)", "$(@:B:a.lib)", "$(@:B:a.ilk)", },
}
end

View File

@ -0,0 +1,101 @@
-- msvc6.lua - Visual Studio 6
module(..., package.seeall)
local native = require "tundra.native"
local os = require "os"
-- Join two path fragments with a backslash.
-- A nil `path` yields `path_to_append` unchanged; a trailing backslash
-- on `path` is not doubled.
function path_combine(path, path_to_append)
  if path == nil then
    return path_to_append
  end
  local sep = path:find("\\$") and "" or "\\"
  return path .. sep .. path_to_append
end
-- Iterate `maybe_list` as an array: tables are iterated directly, any
-- other value is wrapped as a single-element array.
function path_it(maybe_list)
  if type(maybe_list) ~= "table" then
    maybe_list = { maybe_list }
  end
  return ipairs(maybe_list)
end
-- Configure `env` to build with Visual Studio 6, located through its
-- registry keys. Sets compiler/linker/rc paths and wires INCLUDE, LIB
-- and PATH for the classic VC6 directory layout.
function apply(env, options)
if native.host_platform ~= "windows" then
error("the msvc6 toolset only works on windows hosts")
end
-- Load basic MSVC environment setup first.
-- We're going to replace the paths to some tools.
tundra.unitgen.load_toolset('msvc', env)
options = options or {}
-- We'll find any edition of VS (including Express) here
local vs_root = native.reg_query("HKLM", "SOFTWARE\\Microsoft\\VisualStudio\\6.0\\Setup\\Microsoft Visual C++", "ProductDir")
assert(vs_root, "The requested version of Visual Studio isn't installed")
vs_root = string.gsub(vs_root, "\\+$", "\\")
-- VsCommonDir holds the shared MSDev98 tools (incl. the resource compiler).
local common_root = native.reg_query("HKLM", "SOFTWARE\\Microsoft\\VisualStudio\\6.0\\Setup", "VsCommonDir")
assert(common_root, "The requested version of Visual Studio isn't installed")
common_root = string.gsub(common_root, "\\+$", "\\")
local vc_lib
local vc_bin
vc_bin = vs_root .. "\\bin"
vc_lib = vs_root .. "\\lib"
-- Tools
-- Quoted: the install path usually contains spaces.
local cl_exe = '"' .. path_combine(vc_bin, "cl.exe") .. '"'
local lib_exe = '"' .. path_combine(vc_bin, "lib.exe") .. '"'
local link_exe = '"' .. path_combine(vc_bin, "link.exe") .. '"'
local rc_exe = '"' .. path_combine(common_root, "MSDev98\\Bin\\rc.exe") .. '"'
env:set('CC', cl_exe)
env:set('CXX', cl_exe)
env:set('LIB', lib_exe)
env:set('LD', link_exe)
env:set('RC', rc_exe)
env:set("RCOPTS", "") -- clear the "/nologo" option (it was first added in VS2010)
-- Wire-up the external environment
env:set_external_env_var('VSINSTALLDIR', vs_root)
env:set_external_env_var('VCINSTALLDIR', vs_root .. "\\vc")
--env:set_external_env_var('DevEnvDir', vs_root .. "Common7\\IDE")
do
local include = {
path_combine(vs_root, "ATL\\INCLUDE"),
path_combine(vs_root, "INCLUDE"),
path_combine(vs_root, "MFC\\INCLUDE"),
}
env:set_external_env_var("INCLUDE", table.concat(include, ';'))
end
do
local lib = {
path_combine(vs_root, "LIB"),
path_combine(vs_root, "MFC\\LIB"),
}
local lib_str = table.concat(lib, ';')
env:set_external_env_var("LIB", lib_str)
env:set_external_env_var("LIBPATH", lib_str)
end
-- Modify %PATH%
do
local path = {
path_combine(vs_root, "BIN"),
path_combine(common_root, "MSDev98\\BIN"),
env:get_external_env_var('PATH'),
}
env:set_external_env_var("PATH", table.concat(path, ';'))
end
end

View File

@ -0,0 +1,61 @@
-- openwatcom.lua - Support for the Open Watcom compiler C/C++ compiler
module(..., package.seeall)
local native = require "tundra.native"
local os = require "os"
-- Point `env` at an Open Watcom installation (options.InstallDir is
-- required). Sets WATCOM/EDPATH/WIPFC and extends PATH and INCLUDE.
local function setup(env, options)
  if native.host_platform ~= "windows" then
    error("the openwatcom toolset only works on windows hosts")
  end
  assert(options, "No Options provided")
  local dir = assert(options.InstallDir)
  env:set_external_env_var("WATCOM", dir)
  env:set_external_env_var("EDPATH", dir .. "\\EDDAT")
  env:set_external_env_var("WIPFC", dir .. "\\WIPFC")
  -- (Removed a stray debug print of the assembled PATH.)
  local p = native.getenv("PATH") .. ";" .. dir .. "\\BINNT\\;" .. dir .. "\\BINW\\"
  env:set_external_env_var("PATH", p)
  -- Prepend pre-existing INCLUDE entries, if any. getenv defaults to "",
  -- and "" is truthy in Lua, so the previous `if inc then` always fired
  -- and produced a leading empty INCLUDE entry when the variable was unset.
  local inc = native.getenv("INCLUDE", "")
  if inc ~= "" then
    inc = inc .. ";"
  end
  env:set_external_env_var("INCLUDE", inc .. dir .. "\\H;" .. dir .. "\\H\\NT;" .. dir .. "\\H\\NT\\DIRECTX;" .. dir .. "\\H\\NT\\DDK")
end
-- Open Watcom toolset entry point: wires wcl386/wlib/wlink and the
-- Watcom-style (-d, -i) option formatting on top of generic-cpp.
-- Several *COM templates are intentionally left empty/stubbed out.
function apply(env, options)
-- load the generic C toolset first
tundra.unitgen.load_toolset("generic-cpp", env)
setup(env, options)
env:set_many {
["NATIVE_SUFFIXES"] = { ".c", ".cpp", ".cc", ".cxx", ".lib", ".obj" },
["OBJECTSUFFIX"] = ".obj",
["LIBSUFFIX"] = ".lib",
["CC"] = "wcl386.exe",
["CXX"] = "wcl386.exe",
["LIB"] = "wlib.exe",
["LD"] = "wlink.exe",
["CPPDEFS"] = "_WIN32",
["CCOPTS"] = "-wx -we",
["_CPPDEFS"] = "$(CPPDEFS:p-d) $(CPPDEFS_$(CURRENT_VARIANT:u):p-d)",
["_USE_PCH_OPT"] = "",
["_USE_PCH"] = "",
["_CCCOM"] = "$(CC) /c @RESPONSE|@|$(_CPPDEFS) $(CPPPATH:b:p-i) $(CCOPTS) $(CCOPTS_$(CURRENT_VARIANT:u)) $(_USE_PCH) -fo=$(@:b) $(<:b)",
["CCCOM"] = "$(_CCCOM)",
["CXXCOM"] = "$(_CCCOM)",
["PCHCOMPILE"] = "",
["LIBS"] = "",
["PROGOPTS"] = "",
["PROGCOM"] = "", -- "$(LD) @RESPONSE|@|$(PROGOPTS) $(LIBS) /out:$(@:b) $(<:b)",
["LIBOPTS"] = "",
["LIBCOM"] = "", -- "$(LIB) @RESPONSE|@|$(LIBOPTS) /out:$(@:b) $(<:b)",
["SHLIBOPTS"] = "",
["SHLIBCOM"] = "", -- "$(LD) /nologo @RESPONSE|@|$(SHLIBOPTS) $(LIBPATH:b:p/LIBPATH\\:) $(LIBS) /out:$(@:b) $(<:b)",
}
end

View File

@ -0,0 +1,36 @@
module(..., package.seeall)
local native = require "tundra.native"
-- VBCC toolset (Amiga-targeting C compiler + vasm assembler).
-- Requires the VBCC environment variable to point at the install root.
function apply(env, options)
-- load the generic C toolset first
tundra.unitgen.load_toolset("generic-cpp", env)
-- Also add assembly support.
tundra.unitgen.load_toolset("generic-asm", env)
local vbcc_root = assert(native.getenv("VBCC"), "VBCC environment variable must be set")
env:set_many {
["NATIVE_SUFFIXES"] = { ".c", ".cpp", ".cc", ".cxx", ".s", ".asm", ".a", ".o" },
["OBJECTSUFFIX"] = ".o",
["LIBPREFIX"] = "",
["LIBSUFFIX"] = ".a",
["VBCC_ROOT"] = vbcc_root,
-- Tool paths are built from the root using the platform separator.
["CC"] = vbcc_root .. "$(SEP)bin$(SEP)vc$(HOSTPROGSUFFIX)",
["LIB"] = vbcc_root .. "$(SEP)bin$(SEP)vlink$(HOSTPROGSUFFIX)",
["LD"] = vbcc_root .. "$(SEP)bin$(SEP)vc$(HOSTPROGSUFFIX)",
["ASM"] = vbcc_root .. "$(SEP)bin$(SEP)vasmm68k_mot$(HOSTPROGSUFFIX)",
["VBCC_SDK_INC"] = vbcc_root .. "$(SEP)include$(SEP)sdk",
["_OS_CCOPTS"] = "",
["_OS_CXXOPTS"] = "",
["CCCOM"] = "$(CC) $(_OS_CCOPTS) -c $(CPPDEFS:p-D) $(CPPPATH:f:p-I) $(CCOPTS) $(CCOPTS_$(CURRENT_VARIANT:u)) -o $(@) $(<)",
["ASMCOM"] = "$(ASM) -quiet -Fhunk -phxass $(ASMOPTS) $(ASMOPTS_$(CURRENT_VARIANT:u)) $(ASMDEFS:p-D) $(ASMINCPATH:f:p-I) -I$(VBCC_SDK_INC) -o $(@) $(<)",
["PROGOPTS"] = "",
["PROGCOM"] = "$(LD) $(PROGOPTS) $(LIBPATH:p-L) $(LIBS:p-l) -o $(@) $(<)",
["PROGPREFIX"] = "",
["LIBOPTS"] = "",
["LIBCOM"] = "$(LIB) -r $(LIBOPTS) -o $(@) $(<)",
-- Keywords the implicit dependency scanner looks for in asm sources.
["ASMINC_KEYWORDS"] = { "INCLUDE", "include" },
["ASMINC_BINARY_KEYWORDS"] = { "INCBIN", "incbin" },
}
end

View File

@ -0,0 +1,21 @@
module(..., package.seeall)
local path = require("tundra.path")
local depgraph = require("tundra.depgraph")
local gencpp = require("tundra.tools.generic-cpp")
-- Build a DAG node compiling one .rc source into a .res file via
-- $(RCCOM), scanned with the C preprocessor scanner for #include deps.
local function compile_resource_file(env, pass, fn)
  local output = path.make_object_filename(env, fn, env:get('W32RESSUFFIX'))
  return depgraph.make_node {
    Env = env,
    Label = 'Rc $(@)',
    Pass = pass,
    Action = "$(RCCOM)",
    InputFiles = { fn },
    OutputFiles = { output },
    Scanner = gencpp.get_cpp_scanner(env, fn),
  }
end
-- Register the ".rc" implicit make function so units can list resource
-- scripts among their regular sources.
function apply(env, options)
env:register_implicit_make_fn("rc", compile_resource_file)
end

View File

@ -0,0 +1,12 @@
module(..., package.seeall)
-- yasm assembler toolset: layers the yasm command template and its
-- %include scanner keyword on top of the generic assembly toolset.
function apply(env, options)
-- load the generic assembly toolset first
tundra.unitgen.load_toolset("generic-asm", env)
env:set_many {
["YASM"] = "yasm",
["ASMCOM"] = "$(YASM) -o $(@) $(ASMDEFS:p-D ) $(ASMOPTS) $(<)",
["ASMINC_KEYWORDS"] = { "%include" },
}
end

View File

@ -0,0 +1,197 @@
module(..., package.seeall)
local util = require "tundra.util"
local buildfile = require "tundra.buildfile"
local decl = require "tundra.decl"
local nodegen = require "tundra.nodegen"
-- Split a build id of the form "platform-toolset-variant-subvariant"
-- into its four components; missing components fall back to `default`.
function match_build_id(id, default)
  assert(id)
  local parts = {}
  for component in id:gmatch("[^-]+") do
    parts[#parts + 1] = component
  end
  return parts[1] or default, parts[2] or default,
         parts[3] or default, parts[4] or default
end
-- Iterator over the values of field `name` found on `config` and then on
-- each ancestor reached through the `Inherit` chain; skips tables that
-- don't define the field.
local function iter_inherits(config, name)
  local current = config
  return function()
    while current do
      local value = current[name]
      current = current.Inherit
      if value then return value end
    end
  end
end
-- Load a toolset module by id and invoke its apply() with the remaining
-- arguments. Unqualified ids ("msvc") resolve inside tundra.tools.
function load_toolset(id, ...)
  -- For non-qualified packages, use a default package
  if not id:find("%.") then
    id = "tundra.tools." .. id
  end
  -- require() raises on failure instead of returning (nil, err), so the
  -- old `local pkg, err = require(id)` error branch was dead code.
  -- Trap the error with pcall so we can say which module was at fault.
  local ok, pkg = pcall(require, id)
  if not ok then
    errorf("couldn't load extension module %s: %s", id, pkg)
  end
  pkg.apply(...)
end
-- Populate `env` for one (config, variant, subvariant) tuple: sets the
-- CURRENT_* identity vars, runs every tool in the config's inherited
-- Tools lists, then layers Env/ReplaceEnv customizations from the build
-- data and the config chain. Returns the same `env`.
local function setup_env(env, build_data, tuple, build_id)
local config = tuple.Config
local variant_name = tuple.Variant.Name
if not build_id then
build_id = config.Name .. "-" .. variant_name .. "-" .. tuple.SubVariant
end
local naked_platform, naked_toolset = match_build_id(build_id)
env:set("CURRENT_PLATFORM", naked_platform) -- e.g. linux or macosx
env:set("CURRENT_TOOLSET", naked_toolset) -- e.g. gcc or msvc
env:set("CURRENT_VARIANT", tuple.Variant.Name) -- e.g. debug or release
env:set("BUILD_ID", build_id) -- e.g. linux-gcc-debug
env:set("OBJECTDIR", "$(OBJECTROOT)" .. SEP .. "$(BUILD_ID)")
for tools in iter_inherits(config, "Tools") do
-- Reject map-style Tools tables early with a descriptive message.
for k, v in pairs(tools) do
if type(k) == "string" then
error("Tools must be a plain array - to include options keys wrap them in their own tables:\n " ..
"e.g. Tools = { { 'foo'; Option = ... }, ... }.\n Your Tools:\n" .. util.tostring(tools))
end
end
-- Each entry is a toolset name, a { name; options... } table, or a
-- function(env, options).
for _, data in ipairs(tools) do
local id, options
if type(data) == "table" then
id = assert(data[1])
options = data
data = id
end
if type(data) == "string" then
load_toolset(data, env, options)
elseif type(data) == "function" then
data(env, options)
else
error("bad parameters")
end
end
end
-- Incorporate matching values from the build data's Env and ReplaceEnv.
if build_data.Env then
nodegen.append_filtered_env_vars(env, build_data.Env, build_id, false)
end
if build_data.ReplaceEnv then
nodegen.replace_filtered_env_vars(env, build_data.ReplaceEnv, build_id, false)
end
-- Incorporate matching values from the config's Env and ReplaceEnv.
for env_tab in iter_inherits(config, "Env") do
nodegen.append_filtered_env_vars(env, env_tab, build_id, false)
end
for env_tab in iter_inherits(config, "ReplaceEnv") do
nodegen.replace_filtered_env_vars(env, env_tab, build_id, false)
end
-- Run post-setup functions. This typically sets up implicit make functions.
env:run_setup_functions()
return env
end
-- Build the environment set for a tuple: "__default" maps to the main
-- config's env, plus one env per entry in the config's SubConfigs table
-- (all sharing the main env's BUILD_ID). Returns moniker -> env.
local function setup_envs(tuple, configs, default_env, build_data)
local result = {}
local top_env = setup_env(default_env:clone(), build_data, tuple)
result["__default"] = top_env
-- Use the same build id for all subconfigurations
local build_id = top_env:get("BUILD_ID")
local cfg = configs[tuple.Config.Name]
for moniker, x in util.nil_pairs(cfg.SubConfigs) do
if result[x] then
croak("duplicate subconfig name: %s", x)
end
-- Sub-configs reuse the variant/subvariant but swap in their own config.
local sub_tuple = { Config = configs[x], Variant = tuple.Variant, SubVariant = tuple.SubVariant }
if not sub_tuple.Config then
errorf("%s: no such config (in SubConfigs specification)", x)
end
local sub_env = setup_env(default_env:clone(), build_data, sub_tuple, build_id)
result[moniker] = sub_env
end
return result
end
-- Run the units file (args.Units, default "units.lua") and return its
-- parsed declaration nodes: (raw_nodes, default_nodes, always_nodes).
-- Syntax errors are reported through buildfile.syntax_error_catcher and
-- abort the process via croak.
function parse_units(build_tuples, args, passes)
if args.SyntaxExtensions then
print("*WARNING* SyntaxExtensions has been deprecated. Use require instead.")
end
-- Still honored for backwards compatibility despite the warning above.
for _, id in util.nil_ipairs(args.SyntaxExtensions) do
require(id)
end
local function chunk ()
local raw_nodes, default_nodes, always_nodes = decl.parse(args.Units or "units.lua")
assert(#default_nodes > 0 or #always_nodes > 0, "no default unit name to build was set")
return { raw_nodes, default_nodes, always_nodes }
end
local success, result = xpcall(chunk, buildfile.syntax_error_catcher)
if success then
return result[1], result[2], result[3]
else
print("Build script execution failed")
croak("%s", result or "")
end
end
-- Inputs
-- build_tuples - the config/variant/subvariant pairs to include in the DAG
-- args - Raw data from Build() call
-- passes - Passes specified in Build() call
-- configs - Configs specified in Build() call
function generate_dag(build_tuples, args, passes, configs, default_env)
local raw_nodes, default_nodes, always_nodes = parse_units(build_tuples, args, passes)
local results = {}
-- Let the nodegen code generate DAG nodes for all active
-- configurations/variants.
for _, tuple in pairs(build_tuples) do
printf("Generating DAG for %s-%s-%s", tuple.Config.Name, tuple.Variant.Name, tuple.SubVariant)
local envs = setup_envs(tuple, configs, default_env, args)
local always_nodes, default_nodes, named_nodes = nodegen.generate_dag {
Envs = envs,
Config = tuple.Config,
Variant = tuple.Variant,
Declarations = raw_nodes,
DefaultNodes = default_nodes,
AlwaysNodes = always_nodes,
Passes = passes,
}
results[#results + 1] = {
Config = assert(tuple.Config),
Variant = assert(tuple.Variant),
SubVariant = assert(tuple.SubVariant),
AlwaysNodes = always_nodes,
DefaultNodes = default_nodes,
NamedNodes = named_nodes,
}
end
return raw_nodes, results
end

View File

@ -0,0 +1,360 @@
local _tostring = tostring
module(..., package.seeall)
-- Pretty-print any Lua value as a string (recursively for tables),
-- using `stack` to mark already-visited tables as '<recursion>'.
-- Shadows the global tostring inside this module; the original is kept
-- in the file-local `_tostring`.
function tostring(value, stack)
local str = ''
stack = stack or {}
if type(value) ~= 'table' then
if type(value) == 'string' then
str = string.format("%q", value)
else
str = _tostring(value)
end
elseif stack[value] then
return '<recursion>'
else
stack[value] = true
-- Collect and sort keys for deterministic output.
local auxTable = {}
for k, v in pairs(value) do
auxTable[#auxTable + 1] = k
end
table.sort(auxTable, function (a, b) return _tostring(a) < _tostring(b) end)
str = str..'{'
local separator = ""
local entry = ""
for index, fieldName in ipairs(auxTable) do
-- Positive numeric keys are printed without the "k = " prefix.
-- NOTE(review): a *string* key like "2" also passes tonumber() and is
-- then looked up as value[2], which may not be the intended field —
-- confirm whether numeric-string keys ever occur here.
if ((tonumber(fieldName)) and (tonumber(fieldName) > 0)) then
entry = tostring(value[tonumber(fieldName)], stack)
else
entry = tostring(fieldName) .. " = " .. tostring(rawget(value, fieldName), stack)
end
str = str..separator..entry
separator = ", "
end
str = str..'}'
-- Append the metatable's representation when one is attached.
local mt = getmetatable(value)
if mt then
str = str .. ' @meta = ' .. tostring(mt, stack)
end
end
return str
end
-- Replace every array element of `t` with fn(element); returns `t`.
function map_in_place(t, fn)
  for i = 1, #t do
    t[i] = fn(t[i])
  end
  return t
end
-- Return a new array where each element is fn(t[i]).
function map(t, fn)
  local mapped = {}
  for i = 1, #t do
    mapped[i] = fn(t[i])
  end
  return mapped
end
-- nil-tolerant map(): a nil input yields nil instead of erroring.
function mapnil(table, fn)
  if table == nil then
    return nil
  end
  return map(table, fn)
end
-- Fetch required argument `name` from table `tab`; raises (reported at
-- the caller's caller, level 3) when missing, prefixing `context` if given.
function get_named_arg(tab, name, context)
  local value = tab[name]
  if value then
    return value
  end
  local prefix = context and (context .. ": ") or ""
  error(prefix .. "argument " .. name .. " must be specified", 3)
end
-- Parse command-line `args` (starting at index 2) against `blueprint`, a
-- list of option specs { Name=, Short=, Long=, HasValue= }.
-- Returns (options, targets) on success, or (nil, nil, message) on a
-- bad option. Valueless options accumulate a count; bare words become
-- targets. Supports "-x", "-xVAL", "--long", "--long=VAL" and a
-- separate following value for HasValue options.
function parse_cmdline(args, blueprint)
  local by_flag = {}
  for _, spec in ipairs(blueprint) do
    if spec.Short then by_flag[spec.Short] = spec end
    if spec.Long then by_flag[spec.Long] = spec end
  end
  local options, targets = {}, {}
  local i, count = 2, #args
  while i <= count do
    local word = args[i]
    local key, value
    if word:sub(1, 2) == '--' then
      key, value = word:match("^%-%-([-a-zA-Z0-9]+)=(.*)$")
      key = key or word:sub(3)
    elseif word:sub(1, 1) == '-' then
      key = word:sub(2, 2)
      if #word > 2 then
        value = word:sub(3)
      end
    else
      targets[#targets + 1] = word
    end
    if key then
      local spec = by_flag[key]
      if not spec then
        return nil, nil, "Unknown option " .. word
      end
      if spec.HasValue then
        if not value then
          -- Consume the next argument as the value.
          i = i + 1
          value = args[i]
        end
        if not value then
          return nil, nil, "Missing value for option " .. word
        end
        options[spec.Name] = value
      else
        options[spec.Name] = (options[spec.Name] or 0) + 1
      end
    end
    i = i + 1
  end
  return options, targets
end
-- Shallow-clone `t` (keys and values copied by reference); nil in, nil out.
function clone_table(t)
  if not t then
    return nil
  end
  local r = {}
  -- pairs() already visits every key including the array part, so the
  -- previous extra ipairs() pass just re-copied the same entries.
  for k, v in pairs(t) do
    r[k] = v
  end
  return r
end
-- Recursively clone `t` (both keys and values are deep-cloned);
-- nil in, nil out. Does not handle cyclic tables (same as before:
-- a self-referencing table recurses without bound).
function deep_clone_table(t)
  if not t then
    return nil
  end
  local function clone_value(v)
    if type(v) == "table" then
      return deep_clone_table(v)
    end
    return v
  end
  local r = {}
  -- A single pairs() pass covers every key; the previous additional
  -- ipairs() pass cloned each array-part value a second time and
  -- overwrote the first clone for no benefit.
  for k, v in pairs(t) do
    r[clone_value(k)] = clone_value(v)
  end
  return r
end
-- Copy the array part of `t` into a fresh table.
function clone_array(t)
  local copy = {}
  for index, value in ipairs(t) do
    copy[index] = value
  end
  return copy
end
-- Concatenate the array parts of all arguments into one new array,
-- skipping nil arguments (select('#', ...) preserves trailing nils).
function merge_arrays(...)
  local merged = {}
  for i = 1, select('#', ...) do
    local src = select(i, ...)
    if src then
      for _, item in ipairs(src) do
        merged[#merged + 1] = item
      end
    end
  end
  return merged
end
-- Two-array merge that avoids allocation when possible: with only one
-- non-nil argument, that array is returned as-is (same reference).
function merge_arrays_2(a, b)
  if a and b then
    return merge_arrays(a, b)
  end
  return a or b or {}
end
-- True if `str` matches at least one Lua pattern in `patterns`.
function matches_any(str, patterns)
  for _, pat in ipairs(patterns) do
    if str:match(pat) then
      return true
    end
  end
  return false
end
-- No-op iterator used by nil_pairs/nil_ipairs: returns no values, so a
-- generic for driven by it terminates immediately.
function return_nil()
end
-- pairs() that tolerates a nil table (iterates nothing in that case).
function nil_pairs(t)
  if not t then
    return return_nil
  end
  return next, t
end
-- ipairs() that tolerates a nil table (iterates nothing in that case).
function nil_ipairs(t)
  if not t then
    return return_nil
  end
  return ipairs(t)
end
-- Remove every key from `tab` in place and return it (keeps the table
-- identity, unlike assigning a fresh {}).
function clear_table(tab)
  local key = next(tab)
  while key ~= nil do
    tab[key] = nil
    -- Clearing an existing field during traversal is explicitly allowed.
    key = next(tab, key)
  end
  return tab
end
-- Return a new array with the elements of `tab` for which predicate(x)
-- is truthy, in their original order.
function filter(tab, predicate)
  local kept = {}
  for _, item in ipairs(tab) do
    if predicate(item) then
      kept[#kept + 1] = item
    end
  end
  return kept
end
-- Like filter(), but a nil predicate yields nil instead of a table.
function filter_nil(tab, predicate)
  if not predicate then
    return nil
  end
  local kept = {}
  for _, item in ipairs(tab) do
    if predicate(item) then
      kept[#kept + 1] = item
    end
  end
  return kept
end
-- Remove all elements failing `predicate` from array `tab`, in place.
-- Walks backwards so table.remove doesn't disturb unvisited indices.
function filter_in_place(tab, predicate)
  for i = #tab, 1, -1 do
    if not predicate(tab[i]) then
      table.remove(tab, i)
    end
  end
  return tab
end
-- Append every element of array `items` to the end of `result`,
-- modifying and returning `result`.
function append_table(result, items)
  local base = #result
  for i = 1, #items do
    result[base + i] = items[i]
  end
  return result
end
-- Flatten arbitrarily nested plain arrays into a single array. Tables
-- that carry a metatable are treated as leaf values, not containers.
function flatten(array)
  local out = {}
  local function visit(item)
    if type(item) == 'table' and not getmetatable(item) then
      for _, child in ipairs(item) do
        visit(child)
      end
    else
      out[#out + 1] = item
    end
  end
  visit(array)
  return out
end
-- Wrap `closure` so it executes at most once; the (asserted truthy)
-- result is cached and returned on every later call, whose arguments
-- are ignored.
function memoize(closure)
  local cached
  return function(...)
    if cached == nil then
      cached = assert(closure(...))
    end
    return cached
  end
end
-- Return a new array with duplicates removed, keeping first-seen order.
-- Tolerates a nil input (via nil_ipairs), yielding an empty array.
function uniq(array)
  local seen, out = {}, {}
  for _, value in nil_ipairs(array) do
    if not seen[value] then
      seen[value] = true
      out[#out + 1] = value
    end
  end
  return out
end
-- Build a set-style table mapping each array element to true; a nil
-- input yields an empty table.
function make_lookup_table(array)
  local set = {}
  for _, item in nil_ipairs(array) do
    set[item] = true
  end
  return set
end
-- Collect every key of the table into a new array (order unspecified);
-- nil input yields an empty array.
function table_keys(array)
  local keys = {}
  for k in nil_pairs(array) do
    keys[#keys + 1] = k
  end
  return keys
end
-- Collect every value of the table into a new array (order unspecified);
-- nil input yields an empty array.
function table_values(array)
  local values = {}
  for _, v in nil_pairs(array) do
    values[#values + 1] = v
  end
  return values
end
-- True if some element of `array` compares == to `find`.
function array_contains(array, find)
  for _, element in ipairs(array) do
    if element == find then
      return true
    end
  end
  return false
end

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -0,0 +1,41 @@
--
-- strict.lua
-- checks uses of undeclared global variables
-- All global variables must be 'declared' through a regular assignment
-- (even assigning nil will do) in a main chunk before being used
-- anywhere or assigned to inside a function.
--
-- Cache the globals we use before installing the checks below.
local getinfo, error, rawset, rawget = debug.getinfo, error, rawset, rawget
-- Install the checks on _G's metatable, creating one if needed.
local mt = getmetatable(_G)
if mt == nil then
mt = {}
setmetatable(_G, mt)
end
-- Set of global names that have been assigned at least once.
mt.__declared = {}
-- Classify the code two call frames up: "main" for the main chunk,
-- "C" for C code; anything else is a plain Lua function.
local function what ()
local d = getinfo(3, "S")
return d and d.what or "C"
end
-- Assigning a global: allowed from a main chunk or C; a Lua function
-- assigning a never-declared name is an error. Any successful
-- assignment marks the name as declared.
mt.__newindex = function (t, n, v)
if not mt.__declared[n] then
local w = what()
if w ~= "main" and w ~= "C" then
error("assign to undeclared variable '"..n.."'", 2)
end
mt.__declared[n] = true
end
rawset(t, n, v)
end
-- Reading an undeclared global from Lua code is an error; C code is
-- exempt so host embeddings keep working.
mt.__index = function (t, n)
if not mt.__declared[n] and what() ~= "C" then
error("variable '"..n.."' is not declared", 2)
end
return rawget(t, n)
end

View File

@ -0,0 +1,43 @@
-- Top-level command dispatcher for the embedded Lua side of tundra.
require "strict"
local boot = require "tundra.boot"
-- Maps action name (first CLI argument) to its handler.
local actions = {
['generate-dag'] = function(build_script)
assert(build_script, "need a build script name")
boot.generate_dag_data(build_script)
end,
['generate-ide-files'] = function(build_script, ide_script)
assert(build_script, "need a build script name")
assert(ide_script, "need a generator name")
boot.generate_ide_files(build_script, ide_script)
end,
['selftest'] = function()
require "tundra.selftest"
end
}
-- Dispatch `action_name` with the remaining arguments.
-- Relies on the Lua 5.1 globals `arg` (full argv) and `unpack`;
-- `pause()` is a global installed by requiring tundra.debugger.
local function main(action_name, ...)
assert(action_name, "need an action")
local action = actions[action_name]
assert(action, "unknown action '" .. action_name .. "'")
-- check if debugger was requested
for i, v in ipairs(arg) do
if v == "--lua-debugger" then
-- Strip the flag so handlers never see it; break keeps the ipairs
-- traversal safe after the removal.
table.remove(arg, i)
require "tundra.debugger"
pause()
break
end
end
action(unpack(arg))
end
return {
main = main
}

View File

@ -0,0 +1,142 @@
module(..., package.seeall)
-- Use "strict" when developing to flag accesses to nil global variables
-- This has very low perf impact (<0.1%), so always leave it on.
require "strict"
local os = require "os"
local platform = require "tundra.platform"
local util = require "tundra.util"
local depgraph = require "tundra.depgraph"
local unitgen = require "tundra.unitgen"
local buildfile = require "tundra.buildfile"
local native = require "tundra.native"
-- This trio is so useful we want them everywhere without imports.
-- printf-style convenience: format the message and print it in one call.
function _G.printf(msg, ...)
  print(string.format(msg, ...))
end
-- Raise a Lua error with a printf-style formatted message.
function _G.errorf(msg, ...)
  error(string.format(msg, ...))
end
-- Print a formatted message to stderr and terminate the process with
-- exit status 1. Never returns.
function _G.croak(msg, ...)
local str = string.format(msg, ...)
io.stderr:write(str, "\n")
os.exit(1)
end
-- Expose benchmarking function for those times everything sucks
--
-- Wrap a function so that it prints execution times.
--
-- Usage:
-- foo = bench("foo", foo) -- benchmark function foo
function _G.bench(name, fn)
-- Returns a wrapper that forwards all arguments/results while timing the
-- call with the native high-resolution timer.
return function (...)
local t1 = native.get_timer()
-- NOTE(review): capturing results in a plain table drops trailing nils.
local result = { fn(...) }
local t2 = native.get_timer()
printf("%s: %ss", name, native.timerdiff(t1, t2))
return unpack(result)
end
end
local environment = require "tundra.environment"
local nodegen = require "tundra.nodegen"
local decl = require "tundra.decl"
local path = require "tundra.path"
local depgraph = require "tundra.depgraph"
local dagsave = require "tundra.dagsave"
-- Host path separator, used throughout the build scripts.
_G.SEP = platform.host_platform() == "windows" and "\\" or "/"
-- Global option flags shared with the IDE generators.
_G.Options = {
FullPaths = 1
}
-- Build the base environment shared by all configurations.
-- build_data: the table the user script passed to Build().
-- add_unfiltered_vars: when true, apply the script's unfiltered Env and
-- ReplaceEnv entries immediately (config environments pass false so their
-- toolsets run before user customizations).
local function make_default_env(build_data, add_unfiltered_vars)
local default_env = environment.create()
default_env:set_many {
["OBJECTROOT"] = "t2-output",
["SEP"] = SEP,
}
local host_platform = platform.host_platform()
do
-- Apply host-specific defaults from tundra.host.<platform>.
local mod_name = "tundra.host." .. host_platform
local mod = require(mod_name)
mod.apply_host(default_env)
end
-- Add any unfiltered entries from the build data's Env and ReplaceEnv to the
-- default environment. For config environments, this will be false, because we
-- want to wait until the config's tools have run before adding any user
-- customizations.
if add_unfiltered_vars then
if build_data.Env then
nodegen.append_filtered_env_vars(default_env, build_data.Env, nil, true)
end
if build_data.ReplaceEnv then
nodegen.replace_filtered_env_vars(default_env, build_data.ReplaceEnv, nil, true)
end
end
return default_env
end
-- Run the named build script and serialize the resulting dependency graph
-- to disk via tundra.dagsave. Backs the 'generate-dag' action.
function generate_dag_data(build_script_fn)
local build_data = buildfile.run(build_script_fn)
-- false: user Env/ReplaceEnv wait until each config's tools have run.
local env = make_default_env(build_data.BuildData, false)
local raw_nodes, node_bindings = unitgen.generate_dag(
build_data.BuildTuples,
build_data.BuildData,
build_data.Passes,
build_data.Configs,
env)
dagsave.save_dag_data(
node_bindings,
build_data.DefaultVariant,
build_data.DefaultSubVariant,
build_data.ContentDigestExtensions,
build_data.Options)
end
-- Run the build script and hand the resulting DAG to an IDE project
-- generator module (e.g. tundra.ide.msvc). Backs 'generate-ide-files'.
function generate_ide_files(build_script_fn, ide_script)
-- We are generating IDE integration files. Load the specified
-- integration module rather than DAG builders.
--
-- Also, default to using full paths for all commands to aid with locating
-- sources better.
Options.FullPaths = 1
local build_data = buildfile.run(build_script_fn)
local build_tuples = assert(build_data.BuildTuples)
local raw_data = assert(build_data.BuildData)
local passes = assert(build_data.Passes)
local env = make_default_env(raw_data, true)
-- Bare generator names resolve under tundra.ide.*; dotted names are taken
-- as complete module paths.
if not ide_script:find('.', 1, true) then
ide_script = 'tundra.ide.' .. ide_script
end
require(ide_script)
-- Generate dag
local raw_nodes, node_bindings = unitgen.generate_dag(
build_data.BuildTuples,
build_data.BuildData,
build_data.Passes,
build_data.Configs,
env)
-- Pass the build tuples directly to the generator and let it write
-- files.
nodegen.generate_ide_files(build_tuples, build_data.DefaultNodes, raw_nodes, env, raw_data.IdeGenerationHints, ide_script)
end

View File

@ -0,0 +1,253 @@
module(..., package.seeall)
local util = require "tundra.util"
local native = require "tundra.native"
-- Wrap a variant name in the canonical variant record shape
-- ({ Name = ..., Options = {} }).
local function mk_defvariant(name)
  local variant = { Name = name, Options = {} }
  return variant
end
-- Variants offered when the user script doesn't specify any.
local default_variants = {
mk_defvariant "debug",
mk_defvariant "production",
mk_defvariant "release"
}
-- Likewise for subvariants.
local default_subvariants = {
"default"
}
-- Marker metatable identifying tables produced by Config().
local _config_class = {}
-- Table constructor to make tundra.lua syntax a bit nicer in the Configs array
-- Validate a configuration declaration table and tag it with the private
-- config metatable. Names must follow the <platform>-<toolset> pattern, and
-- SubConfigs requires a DefaultSubConfig.
function _G.Config(args)
  local name = args.Name
  if not name then
    error("no `Name' specified for configuration")
  end
  local well_formed = name:match("^[%w_]+-[%w_]+$")
  if not well_formed then
    errorf("configuration name %s doesn't follow <platform>-<toolset> pattern", name)
  end
  if args.SubConfigs and not args.DefaultSubConfig then
    errorf("configuration %s has `SubConfigs' but no `DefaultSubConfig'", name)
  end
  return setmetatable(args, _config_class)
end
-- Compute the cross product of buildable configs x variants x subvariants.
-- configs: name -> config table; variants: name -> variant table;
-- subvariants: set of names. Returns a non-empty array of build tuples or
-- aborts via errorf/croak.
local function analyze_targets(configs, variants, subvariants)
local build_tuples = {}
local build_configs = {}
local build_variants = {}
local build_subvariants = {}
for _, cfg in pairs(configs) do
if not cfg.Virtual then -- skip virtual configs
if not cfg.SupportedHosts then
-- 1.0 compatibility: derive SupportedHosts from DefaultOnHost.
if cfg.DefaultOnHost then
if type(cfg.DefaultOnHost) == "table" then
cfg.SupportedHosts = cfg.DefaultOnHost
else
cfg.SupportedHosts = { cfg.DefaultOnHost }
end
else
printf("1.0-compat: config %s doesn't specify SupportedHosts -- will never be built", cfg.Name);
cfg.SupportedHosts = { }
end
end
-- Only configs listing the current host platform are buildable.
local lut = util.make_lookup_table(cfg.SupportedHosts)
if lut[native.host_platform] then
build_configs[#build_configs + 1] = cfg
end
end
end
for _, var in pairs(variants) do build_variants[#build_variants + 1] = var end
for var, _ in pairs(subvariants) do build_subvariants[#build_subvariants + 1] = var end
for _, config in ipairs(build_configs) do
if config.Virtual then
croak("can't build configuration %s directly; it is a support configuration only", config.Name)
end
for _, variant in ipairs(build_variants) do
for _, subvariant in ipairs(build_subvariants) do
build_tuples[#build_tuples + 1] = { Config = config, Variant = variant, SubVariant = subvariant }
end
end
end
if #build_tuples == 0 then
errorf("no build tuples available\n")
end
return build_tuples
end
-- Custom pcall error handler to scan for syntax errors (thrown as tables) and
-- report them without a backtrace, trying to get the filename and line number
-- right so the user can fix their build file.
function syntax_error_catcher(err_obj)
if type(err_obj) == "table" and err_obj.Class and err_obj.Message then
local i = 1
-- Walk down the stack until we find a function that isn't sourced from
-- a file. These have 'source' names that don't start with an @ sign.
-- Because we read all files into memory before executing them, this
-- will give us the source filename of the user script.
while true do
local info = debug.getinfo(i, 'Sl')
--print(util.tostring(info))
if not info then
break
end
if info.what == "C" or (info.source:sub(1, 1) == "@" and info.source ~= "@units.lua") then
i = i + 1
else
local fn = info.source
if info.source:sub(1, 1) == "@" then
fn = info.source:sub(2)
end
-- -1 means no line info was available for the frame.
if info.currentline == -1 then
return string.format("%s: %s", err_obj.Class, err_obj.Message)
else
return string.format("%s(%d): %s: %s", fn, info.currentline, err_obj.Class, err_obj.Message)
end
end
end
-- Fell off the stack without finding a user frame.
return string.format("%s: %s", err_obj.Class, err_obj.Message)
else
-- Not a structured syntax error: report with a full traceback.
return debug.traceback(err_obj, 2)
end
end
-- A place to store the result of the user's build script calling Build()
local build_result = nil
-- The Build function is the main entry point for "tundra.lua" when invoked.
function _G.Build(args)
if type(args.Configs) ~= "table" or #args.Configs == 0 then
croak("Need at least one config; got %s", util.tostring(args.Configs or "none at all"))
end
local configs, variants, subvariants = {}, {}, {}
-- Legacy support: run "Config" constructor automatically on naked tables
-- passed in Configs array.
for idx = 1, #args.Configs do
local cfg = args.Configs[idx]
if getmetatable(cfg) ~= _config_class then
cfg = Config(cfg)
args.Configs[idx] = cfg
end
configs[cfg.Name] = cfg
end
for _, dir in util.nil_ipairs(args.ScriptDirs) do
-- Make sure dir is sane and ends with a slash
dir = dir:gsub("[/\\]", SEP):gsub("[/\\]$", "")
local expr = dir .. SEP .. "?.lua"
-- Add user toolset dir first so they can override builtin scripts.
package.path = expr .. ";" .. package.path
end
-- Normalize variants: bare strings become full variant records.
if args.Variants then
for i, x in ipairs(args.Variants) do
if type(x) == "string" then
args.Variants[i] = mk_defvariant(x)
else
assert(x.Name)
if not x.Options then
x.Options = {}
end
end
end
end
local variant_array = args.Variants or default_variants
for _, variant in ipairs(variant_array) do variants[variant.Name] = variant end
local subvariant_array = args.SubVariants or default_subvariants
for _, subvariant in ipairs(subvariant_array) do subvariants[subvariant] = true end
-- Default variant/subvariant: explicit request, or the first declared.
local default_variant = variant_array[1]
if args.DefaultVariant then
for _, x in ipairs(variant_array) do
if x.Name == args.DefaultVariant then
default_variant = x
end
end
end
local default_subvariant = args.DefaultSubVariant or subvariant_array[1]
local build_tuples = analyze_targets(configs, variants, subvariants)
local passes = args.Passes or { Default = { Name = "Default", BuildOrder = 1 } }
printf("%d valid build tuples", #build_tuples)
-- Validate pass data
for id, data in pairs(passes) do
if not data.Name then
croak("Pass %s has no Name attribute", id)
elseif not data.BuildOrder then
croak("Pass %s has no BuildOrder attribute", id)
end
end
-- Assume syntax for C and DotNet is always needed
-- for now. Could possible make an option for which generator sets to load
-- in the future.
require "tundra.syntax.native"
require "tundra.syntax.dotnet"
-- Stash the result for run() to pick up after the script finishes.
build_result = {
BuildTuples = build_tuples,
BuildData = args,
Passes = passes,
Configs = configs,
DefaultVariant = default_variant,
DefaultSubVariant = default_subvariant,
ContentDigestExtensions = args.ContentDigestExtensions,
Options = args.Options,
}
end
-- Load and execute a user build script, then return the table its Build()
-- call stashed in `build_result`. Aborts via croak() on I/O, syntax or
-- runtime failure.
function run(build_script_fn)
  local f, err = io.open(build_script_fn, 'r')
  if not f then
    croak("%s", err)
  end
  local text = f:read("*all")
  f:close()
  -- Run the chunk in a private environment that falls back to _G, so stray
  -- globals in the user script don't pollute the real global table.
  local script_globals, script_globals_mt = {}, {}
  script_globals_mt.__index = _G
  setmetatable(script_globals, script_globals_mt)
  local chunk, error_msg = loadstring(text, build_script_fn)
  if not chunk then
    croak("%s", error_msg)
  end
  setfenv(chunk, script_globals)
  -- FIX: the error value returned by xpcall previously shadowed (and was
  -- shadowed by) the final `local result`; give it a distinct name.
  local success, err_result = xpcall(chunk, syntax_error_catcher)
  if not success then
    print("Build script execution failed")
    croak("%s", err_result or "")
  end
  -- Hand over the stashed Build() result and clear it for the next run.
  local result = build_result
  build_result = nil
  return result
end

View File

@ -0,0 +1,409 @@
module(..., package.seeall)
local depgraph = require "tundra.depgraph"
local util = require "tundra.util"
local scanner = require "tundra.scanner"
local dirwalk = require "tundra.dirwalk"
local platform = require "tundra.platform"
local native = require "tundra.native"
local njson = require "tundra.native.json"
local path = require "tundra.path"
local dag_dag_magic = 0x15890105
-- Collect the distinct pass tables referenced by `nodes`, sorted by their
-- BuildOrder. Returns the sorted pass array plus a map pass -> zero-based
-- index (matching the serialized DAG's pass indices).
local function get_passes(nodes)
  local result = {}
  local seen_passes = {}
  for _, node in ipairs(nodes) do
    local p = node.pass
    if not seen_passes[p] then
      -- FIX: the old assert() built its message (including a util.tostring
      -- call) eagerly for every unseen pass; only do that work on failure.
      if type(p) ~= "table" then
        error("Passes must be tables, have " .. util.tostring(p))
      end
      if type(p.BuildOrder) ~= "number" then
        error("Pass BuildOrder must be a number")
      end
      result[#result + 1] = p
      seen_passes[p] = true
    end
  end
  table.sort(result, function (a, b) return a.BuildOrder < b.BuildOrder end)
  local pass_lookup = {}
  for index, pass in ipairs(result) do
    pass_lookup[pass] = index - 1
  end
  return result, pass_lookup
end
-- Wire implicit dependencies: any node whose input file is produced by
-- another node gets that producer appended to its deps. Paths are made
-- absolute and case-folded on case-insensitive filesystems before matching.
local function setup_input_deps(nodes)
local producers = {}
local cwd = native.getcwd() .. SEP
local filter
-- Case-insensitive filesystems get lowercased path keys.
if native.host_platform == 'windows' or native.host_platform == 'macosx' then
filter = function (str) return str:lower() end
else
filter = function (str) return str end
end
local node_deps = {}
-- Record producing node for all output files
for _, n in ipairs(nodes) do
for _, output in util.nil_ipairs(n.outputs) do
if not path.is_absolute(output) then
output = cwd .. output
end
output = filter(output)
-- Two nodes writing the same file is always a build error.
if producers[output] then
errorf("file %s set to be written by more than one target:\n%s\n%s\n",
output, n.annotation, producers[output].annotation)
end
producers[output] = n
end
if n.deps then
node_deps[n] = util.make_lookup_table(n.deps)
end
end
-- Map input files to dependencies
for _, n in ipairs(nodes) do
for _, inputf in util.nil_ipairs(n.inputs) do
if not path.is_absolute(inputf) then
inputf = cwd .. inputf
end
inputf = filter(inputf)
local producer = producers[inputf]
local deps_lut = node_deps[n]
-- NOTE(review): assumes n.deps is a table whenever a producer matches;
-- depgraph.make_node always creates a deps array -- confirm.
if producer and (not deps_lut or not deps_lut[producer]) then
n.deps[#n.deps + 1] = producer
if not deps_lut then
deps_lut = {}
node_deps[n] = deps_lut
end
deps_lut[producer] = true
end
end
end
end
-- Collect the distinct scanner objects referenced by `nodes`, assigning
-- each a stable zero-based index in order of first appearance.
local function get_scanners(nodes)
  local scanners = {}
  local index_of = {}
  for _, node in ipairs(nodes) do
    local s = node.scanner
    if s and index_of[s] == nil then
      index_of[s] = #scanners
      scanners[#scanners + 1] = s
    end
  end
  return scanners, index_of
end
-- Serialize pass names as the "Passes" JSON array on writer `w`.
local function save_passes(w, passes)
  w:begin_array("Passes")
  for _, pass in ipairs(passes) do
    w:write_string(pass.Name)
  end
  w:end_array()
end
-- Serialize scanner descriptions as the "Scanners" JSON array. Generic
-- scanners carry extra keyword/option state.
local function save_scanners(w, scanners)
w:begin_array("Scanners")
for _, s in ipairs(scanners) do
w:begin_object()
w:write_string(s.Kind, 'Kind')
w:begin_array("IncludePaths")
for _, path in util.nil_ipairs(s.Paths) do
w:write_string(path)
end
w:end_array()
-- Serialize specialized state for generic scanners
if s.Kind == 'generic' then
w:write_bool(s.RequireWhitespace, 'RequireWhitespace')
w:write_bool(s.UseSeparators, 'UseSeparators')
w:write_bool(s.BareMeansSystem, 'BareMeansSystem')
w:begin_array('Keywords')
for _, kw in util.nil_ipairs(s.Keywords) do
w:write_string(kw)
end
w:end_array()
w:begin_array('KeywordsNoFollow')
for _, kw in util.nil_ipairs(s.KeywordsNoFollow) do
w:write_string(kw)
end
w:end_array()
end
w:end_object()
end
w:end_array()
end
-- Serialize every DAG node as the "Nodes" JSON array. Dependencies and
-- scanners are written as indices (node.index / scanner_to_index), so the
-- caller must have assigned node indices first.
local function save_nodes(w, nodes, pass_to_index, scanner_to_index)
w:begin_array("Nodes")
for idx, node in ipairs(nodes) do
w:begin_object()
-- Node order in the array must match the pre-assigned zero-based index.
assert(idx - 1 == node.index)
if node.action then
w:write_string(node.action, "Action")
end
if node.preaction then
w:write_string(node.preaction, "PreAction")
end
w:write_string(node.annotation, "Annotation")
w:write_number(pass_to_index[node.pass], "PassIndex")
if #node.deps > 0 then
w:begin_array("Deps")
for _, dep in ipairs(node.deps) do
w:write_number(dep.index)
end
w:end_array()
end
-- Emit a named string array, omitting it entirely when empty.
local function dump_file_list(list, name)
if list and #list > 0 then
w:begin_array(name)
for _, fn in ipairs(list) do
w:write_string(fn)
end
-- NOTE(review): end_array is called with an argument here, unlike
-- every other call site -- presumably ignored by the writer; confirm.
w:end_array(name)
end
end
dump_file_list(node.inputs, "Inputs")
dump_file_list(node.outputs, "Outputs")
dump_file_list(node.aux_outputs, "AuxOutputs")
-- Save environment strings
local env_count = 0
for k, v in util.nil_pairs(node.env) do
env_count = env_count + 1
end
if env_count > 0 then
w:begin_array("Env")
for k, v in pairs(node.env) do
w:begin_object()
w:write_string(k, "Key")
w:write_string(v, "Value")
w:end_object()
end
w:end_array()
end
if node.scanner then
w:write_number(scanner_to_index[node.scanner], "ScannerIndex")
end
-- Optional boolean flags, written only when set.
if node.overwrite_outputs then
w:write_bool(true, "OverwriteOutputs")
end
if node.is_precious then
w:write_bool(true, "PreciousOutputs")
end
if node.expensive then
w:write_bool(true, "Expensive")
end
w:end_object()
end
w:end_array()
end
-- Serialize the "Setup" object: config/variant/subvariant name tables, the
-- per-binding build tuples (as indices into those tables) and the default
-- build tuple for the current host.
local function save_configs(w, bindings, default_variant, default_subvariant)
local configs = {}
local variants = {}
local subvariants = {}
local config_index = {}
local variant_index = {}
local subvariant_index = {}
local default_config = nil
local host_platform = platform.host_platform()
-- First pass: intern names, assigning zero-based indices in first-seen order.
for _, b in ipairs(bindings) do
if not configs[b.Config.Name] then
configs[b.Config.Name] = #config_index
config_index[#config_index+1] = b.Config.Name
end
if not variants[b.Variant.Name] then
variants[b.Variant.Name] = #variant_index
variant_index[#variant_index+1] = b.Variant.Name
end
if not subvariants[b.SubVariant] then
subvariants[b.SubVariant] = #subvariant_index
subvariant_index[#subvariant_index+1] = b.SubVariant
end
if b.Config.DefaultOnHost == host_platform then
default_config = b.Config
end
end
assert(#config_index > 0)
assert(#variant_index > 0)
assert(#subvariant_index > 0)
local function dump_str_array(array, name)
if array and #array > 0 then
w:begin_array(name)
for _, name in ipairs(array) do
w:write_string(name)
end
w:end_array()
end
end
w:begin_object("Setup")
dump_str_array(config_index, "Configs")
dump_str_array(variant_index, "Variants")
dump_str_array(subvariant_index, "SubVariants")
w:begin_array("BuildTuples")
for index, binding in ipairs(bindings) do
w:begin_object()
w:write_number(configs[binding.Config.Name], "ConfigIndex")
w:write_number(variants[binding.Variant.Name], "VariantIndex")
w:write_number(subvariants[binding.SubVariant], "SubVariantIndex")
local function store_node_index_array(nodes, name)
w:begin_array(name)
for _, node in util.nil_ipairs(nodes) do
w:write_number(node.index)
end
w:end_array()
end
store_node_index_array(binding.AlwaysNodes, "AlwaysNodes")
store_node_index_array(binding.DefaultNodes, "DefaultNodes")
w:begin_object("NamedNodes")
for name, node in pairs(binding.NamedNodes) do
w:write_number(node.index, name)
end
w:end_object()
w:end_object()
end
w:end_array()
-- m_DefaultBuildTuple
-- -1 components mean "no default for this host".
w:begin_object("DefaultBuildTuple")
if default_config then
w:write_number(configs[default_config.Name], "ConfigIndex")
else
w:write_number(-1, "ConfigIndex")
end
if default_variant then
w:write_number(variants[default_variant.Name], "VariantIndex")
else
w:write_number(-1, "VariantIndex")
end
if default_subvariant then
w:write_number(subvariants[default_subvariant], "SubVariantIndex")
else
w:write_number(-1, "SubVariantIndex")
end
w:end_object()
w:end_object()
end
-- Serialize the file and glob signatures the DAG depends on, so the native
-- build can detect when the DAG must be regenerated.
local function save_signatures(w, accessed_lua_files)
  w:begin_array("FileSignatures")
  for _, fn in ipairs(accessed_lua_files) do
    w:begin_object()
    local stat = native.stat_file(fn)
    if not stat.exists then
      -- FIX: previous code passed an undefined variable `err` to errorf,
      -- which strict.lua would flag and string.format could not render.
      errorf("accessed file %s is gone", fn)
    end
    w:write_string(fn, "File")
    w:write_number(stat.timestamp, "Timestamp")
    w:end_object()
  end
  w:end_array()
  -- Record every directory listing performed during DAG generation.
  w:begin_array("GlobSignatures")
  local globs = dirwalk.all_queries()
  for _, glob in ipairs(globs) do
    w:begin_object()
    w:write_string(glob.Path, "Path")
    w:begin_array("Files")
    for _, fn in ipairs(glob.Files) do w:write_string(fn) end
    w:end_array()
    w:begin_array("SubDirs")
    for _, fn in ipairs(glob.SubDirs) do w:write_string(fn) end
    w:end_array()
    w:end_object()
  end
  w:end_array()
end
-- Validate pass ordering: a node may only depend on nodes built in the
-- same or an earlier pass; violations abort via errorf.
local function check_deps(nodes)
  for _, node in ipairs(nodes) do
    local node_order = node.pass.BuildOrder
    for _, dep in ipairs(node.deps) do
      local dep_order = dep.pass.BuildOrder
      if dep_order > node_order then
        errorf("%s (pass: %s) depends on %s in later pass (%s)", node.annotation, node.pass.Name, dep.annotation, dep.pass.Name)
      end
    end
  end
end
-- Top-level DAG serialization: index nodes, wire implicit input deps,
-- validate pass ordering and write everything to .tundra2.dag.json.
-- NOTE(review): get_accessed_files is not defined in this module --
-- presumably injected globally by the embedding host; confirm.
function save_dag_data(bindings, default_variant, default_subvariant, content_digest_exts, misc_options)
-- Call builtin function to get at accessed file table
local accessed_lua_files = util.table_keys(get_accessed_files())
misc_options = misc_options or {}
-- -1 means "no limit" on concurrently running expensive jobs.
local max_expensive_jobs = misc_options.MaxExpensiveJobs or -1
printf("save_dag_data: %d bindings, %d accessed files", #bindings, #accessed_lua_files)
local nodes = depgraph.get_all_nodes()
-- Set node indices
for idx, node in ipairs(nodes) do
node.index = idx - 1
end
-- Set up array of passes
local passes, pass_to_index = get_passes(nodes)
-- Hook up dependencies due to input files
setup_input_deps(nodes)
check_deps(nodes)
-- Find scanners
local scanners, scanner_to_index = get_scanners(nodes)
local w = njson.new('.tundra2.dag.json')
w:begin_object()
save_configs(w, bindings, default_variant, default_subvariant)
save_passes(w, passes)
save_scanners(w, scanners)
save_nodes(w, nodes, pass_to_index, scanner_to_index)
save_signatures(w, accessed_lua_files)
if content_digest_exts and #content_digest_exts > 0 then
w:begin_array("ContentDigestExtensions")
for _, ext in ipairs(content_digest_exts) do
w:write_string(ext)
end
w:end_array()
end
w:write_number(max_expensive_jobs, "MaxExpensiveCount")
w:end_object()
w:close()
end

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,91 @@
module(..., package.seeall)
local nodegen = require "tundra.nodegen"
local functions = {}
local _decl_meta = {}
_decl_meta.__index = _decl_meta
local current = nil
-- Build a fresh parser whose function environment resolves unit syntax
-- (evaluator names via nodegen, registered functions, Default/Always) and
-- falls back to _G for everything else. Sets the module-level `current`.
local function new_parser()
local obj = {
Functions = {},
Results = {},
DefaultTargets = {},
AlwaysTargets = {},
}
local outer_env = _G
local iseval = nodegen.is_evaluator
local function indexfunc(tab, var)
if iseval(var) then
-- Return an anonymous function such that
-- the code "Foo { ... }" will result in a call to
-- "nodegen.evaluate('Foo', { ... })"
return function (data)
local result = nodegen.evaluate(var, data)
obj.Results[#obj.Results + 1] = result
return result
end
end
local p = obj.Functions[var]
if p then return p end
return outer_env[var]
end
-- Unit scripts may read through indexfunc but can never create globals.
obj.FunctionMeta = { __index = indexfunc, __newindex = error }
obj.FunctionEnv = setmetatable({}, obj.FunctionMeta)
-- Rebind all registered syntax functions into this parser's environment.
for name, fn in pairs(functions) do
obj.Functions[name] = setfenv(fn, obj.FunctionEnv)
end
-- Default/Always record the targets requested by unit scripts.
obj.Functions["Default"] = function(default_obj)
obj.DefaultTargets[#obj.DefaultTargets + 1] = default_obj
end
obj.Functions["Always"] = function(always_obj)
obj.AlwaysTargets[#obj.AlwaysTargets + 1] = always_obj
end
current = setmetatable(obj, _decl_meta)
return current
end
-- Register a unit-syntax function under `name`. If a parse is in flight
-- (require called from inside a unit script), make the function visible to
-- the active parser immediately.
function add_function(name, fn)
  assert(name and fn)
  functions[name] = fn
  local active = current
  if active ~= nil then
    -- require called from within unit script
    active.Functions[name] = setfenv(fn, active.FunctionEnv)
  end
end
-- Recursively evaluate unit declarations: tables are traversed element by
-- element, functions are executed, and strings are loaded as files. Each
-- chunk runs inside the parser's FunctionEnv.
function _decl_meta:parse_rec(data)
local chunk
if type(data) == "table" then
for _, gen in ipairs(data) do
self:parse_rec(gen)
end
return
elseif type(data) == "function" then
chunk = data
elseif type(data) == "string" then
chunk = assert(loadfile(data))
else
croak("unknown type %s for unit_generator %q", type(data), tostring(data))
end
setfenv(chunk, self.FunctionEnv)
chunk()
end
-- Entry point: create a parser, evaluate the given unit declarations and
-- return (results, default targets, always targets).
function parse(data)
  -- FIX: `p` was an accidental global (missing `local`), leaking the parser
  -- into the module table.
  local p = new_parser()
  current = p
  p:parse_rec(data)
  current = nil
  return p.Results, p.DefaultTargets, p.AlwaysTargets
end

View File

@ -0,0 +1,155 @@
module(..., package.seeall)
local boot = require "tundra.boot"
local util = require "tundra.util"
local path = require "tundra.path"
local native = require "tundra.native"
local environment = require "tundra.environment"
local default_pass = { Name = "Default", BuildOrder = 100000 }
local all_nodes = {}
local _node_mt = {}
_node_mt.__index = _node_mt
-- Create a dependency-graph node from a declaration table. Interpolates the
-- Action/PreAction/Label against the provided environment, normalizes all
-- file paths, and records the node in the module-wide node list.
function make_node(data_)
local env_ = data_.Env
assert(environment.is_environment(env_), "Env must be provided")
local root_path = native.getcwd() .. env_:get('SEP')
-- Absolutize a path and quote it if it contains spaces.
local function path_for_cmdline(p)
local full_path
if path.is_absolute(p) then
full_path = p
else
full_path = root_path .. p
end
if full_path:find(' ', 1, true) then
return '"' .. full_path .. '"'
else
return full_path
end
end
-- Interpolate and normalize each string path; non-strings pass through.
local function normalize_paths(paths)
return util.mapnil(paths, function (x)
if type(x) == "string" then
local v = env_:interpolate(x)
v = path.normalize(v)
return v
else
return x
end
end)
end
-- these are the inputs that $(<) expand to
local regular_inputs = normalize_paths(data_.InputFiles)
-- these are other, auxillary input files that shouldn't appear on the command line
-- useful to e.g. add an input dependency on a tool
local implicit_inputs = normalize_paths(data_.ImplicitInputs)
local inputs = util.merge_arrays_2(regular_inputs, implicit_inputs)
local outputs = normalize_paths(data_.OutputFiles)
-- Sorted copies give the serialized DAG a stable order.
local inputs_sorted = inputs and util.clone_array(inputs) or {}
local outputs_sorted = outputs and util.clone_array(outputs) or {}
local cmdline_inputs = util.merge_arrays(regular_inputs, data_.InputFilesUntracked)
table.sort(inputs_sorted)
table.sort(outputs_sorted)
-- Quote the paths before interpolation into the command line
local expand_env = {
['<'] = util.mapnil(cmdline_inputs, path_for_cmdline),
['@'] = util.mapnil(outputs, path_for_cmdline),
}
-- Unquoted variant used only for the human-readable annotation.
local expand_env_pretty = {
['<'] = cmdline_inputs,
['@'] = outputs,
}
local overwrite = true
if type(data_.OverwriteOutputs) ~= "nil" then
overwrite = data_.OverwriteOutputs
end
if data_.Scanner and not data_.Scanner.Kind then
errorf("Missing scanner kind")
end
-- make sure dependencies are unique
local unique_deps = util.uniq(data_.Dependencies or {})
local params = {
pass = data_.Pass or default_pass,
scanner = data_.Scanner,
deps = unique_deps,
inputs = inputs_sorted,
outputs = outputs_sorted,
is_precious = data_.Precious,
expensive = data_.Expensive,
overwrite_outputs = overwrite,
src_env = env_,
env = env_.external_vars,
aux_outputs = util.mapnil(data_.AuxOutputFiles, function (x)
local result = env_:interpolate(x, expand_env)
return path.normalize(result)
end),
}
if data_.Action then
params.action = env_:interpolate(data_.Action, expand_env)
else
assert(0 == #params.outputs, "can't have output files without an action")
params.action = ""
end
if data_.PreAction then
params.preaction = env_:interpolate(data_.PreAction, expand_env)
end
params.annotation = env_:interpolate(data_.Label or "?", expand_env_pretty)
local result = setmetatable(params, _node_mt)
-- Stash node
all_nodes[#all_nodes + 1] = result
return result
end
-- A depgraph node is any value carrying our private node metatable.
function is_node(obj)
  local mt = getmetatable(obj)
  return mt == _node_mt
end
-- Append this node's output files to `tab`; when `exts` is given, only
-- files with one of those extensions are included.
function _node_mt:insert_output_files(tab, exts)
if exts then
local lut = util.make_lookup_table(exts)
for _, fn in ipairs(self.outputs) do
local ext = path.get_extension(fn)
if lut[ext] then
tab[#tab + 1] = fn
end
end
else
for _, fn in ipairs(self.outputs) do
tab[#tab + 1] = fn
end
end
end
-- Append this node's dependency nodes (if any) onto `tab`.
function _node_mt:insert_deps(tab)
  local deps = self.deps
  for _, dep_node in util.nil_ipairs(deps) do
    tab[#tab + 1] = dep_node
  end
end
-- Return the shared (mutable) array of every node created so far.
function get_all_nodes()
return all_nodes
end

View File

@ -0,0 +1,41 @@
module(..., package.seeall)
local native = require "tundra.native"
-- Stash of all dir walks performed for signature generation.
local query_records = {}
-- Iteratively walk the directory tree rooted at `path`, returning the full
-- paths of all files found. `filter_callback(subdir_name)` may return false
-- to prune a subtree. Every directory listing is recorded in query_records
-- so DAG signatures can detect changed directories later.
function walk(path, filter_callback)
  local dir_stack = { path }
  local paths_out = {}
  while #dir_stack > 0 do
    local dir = dir_stack[#dir_stack]
    table.remove(dir_stack)
    local subdirs, files = native.list_directory(dir)
    query_records[dir] = { Files = files, SubDirs = subdirs }
    for _, subdir in ipairs(subdirs) do
      -- FIX: `full_dir_path` was an accidental global (missing `local`).
      local full_dir_path = dir .. SEP .. subdir
      if not filter_callback or filter_callback(subdir) then
        table.insert(dir_stack, full_dir_path)
      end
    end
    for _, file in ipairs(files) do
      table.insert(paths_out, dir .. SEP .. file)
    end
  end
  return paths_out
end
-- Snapshot every recorded directory query as an array of
-- { Path, Files, SubDirs } records (iteration order unspecified).
function all_queries()
  local result = {}
  local count = 0
  for dir, rec in pairs(query_records) do
    count = count + 1
    result[count] = { Path = dir, Files = rec.Files, SubDirs = rec.SubDirs }
  end
  return result
end

View File

@ -0,0 +1,316 @@
module(..., package.seeall)
local util = require 'tundra.util'
local path = require 'tundra.path'
local depgraph = require 'tundra.depgraph'
local nenv = require 'tundra.environment.native'
local os = require 'os'
local global_setup = {}
--[==[
The environment is a holder for variables and their associated values. Values
are always kept as tables, even if there is only a single value.
FOO = { a b c }
e:interpolate("$(FOO)") -> "a b c"
e:interpolate("$(FOO:j, )") -> "a, b, c"
e:interpolate("$(FOO:p-I)") -> "-Ia -Ib -Ic"
Missing keys trigger errors unless a default value is specified.
]==]--
local envclass = {}
-- Construct a new environment. `parent` (optional) provides inherited
-- bindings; `assignments` (optional) seeds initial variables; `obj` lets
-- callers supply the storage table.
function envclass:create(parent, assignments, obj)
obj = obj or {}
setmetatable(obj, self)
self.__index = self
obj.cached_interpolation = {}
obj.vars = {}
obj.parent = parent
obj.lookup = { obj.vars }
obj.memos = {}
obj.memo_keys = {}
-- Children start with a snapshot of the parent's external variables.
obj.external_vars = parent and util.clone_table(parent.external_vars) or nil
-- assign initial bindings
if assignments then
obj:set_many(assignments)
end
return obj
end
-- Create a child environment inheriting from this one, optionally seeded
-- with initial assignments.
function envclass:clone(assignments)
return envclass:create(self, assignments)
end
-- Associate an implicit make function (plus optional docstring) with a file
-- extension on this environment. The extension is normalized to include a
-- leading dot.
function envclass:register_implicit_make_fn(ext, fn, docstring)
  if type(ext) ~= "string" then
    errorf("extension must be a string")
  end
  if type(fn) ~= "function" then
    errorf("fn must be a function")
  end
  if ext:sub(1, 1) ~= "." then
    ext = "." .. ext -- we want the dot in the extension
  end
  local exts = self._implicit_exts
  if exts == nil then
    exts = {}
    self._implicit_exts = exts
  end
  exts[ext] = {
    Function = fn,
    Doc = docstring or "",
  }
end
-- Find the implicit make function registered for `filename`'s extension,
-- searching this environment first and then its ancestors. Returns nil
-- when no registration exists.
function envclass:get_implicit_make_fn(filename)
local ext = path.get_extension(filename)
local chain = self
while chain do
local t = chain._implicit_exts
if t then
local v = t[ext]
if v then return v.Function end
end
chain = chain.parent
end
return nil
end
-- True if `key` is bound in this environment or any ancestor.
function envclass:has_key(key)
  local env = self
  repeat
    if env.vars[key] then
      return true
    end
    env = env.parent
  until env == nil
  return false
end
-- Direct access to this environment's own (non-inherited) bindings.
function envclass:get_vars()
return self.vars
end
-- Bulk set(): bind every key/value pair from `bindings`.
function envclass:set_many(bindings)
  for key, value in pairs(bindings) do
    self:set(key, value)
  end
end
-- Append a single string value to the list bound to `key`. The existing
-- list (possibly inherited) is copied first, so parents are never mutated.
function envclass:append(key, value)
if type(value) ~= "string" then
error("environment append: " .. util.tostring(value) .. " is not a string", 2)
end
self:invalidate_memos(key)
-- Default of 1 (a non-table) signals "no existing list anywhere".
local t = self:get_list(key, 1)
local result
if type(t) == "table" then
result = util.clone_array(t)
table.insert(result, value)
else
result = { value }
end
self.vars[key] = result
end
-- Bulk append(): append every key/value pair from `data`.
function envclass:append_many(data)
  for key, value in pairs(data) do
    self:append(key, value)
  end
end
-- Replace the binding for `key` outright; a string value is promoted to a
-- single-element list.
function envclass:replace(key, value)
if type(value) == "string" then
value = { value }
end
assert(type(value) == "table")
self:invalidate_memos(key)
self.vars[key] = value
end
-- Drop all cached interpolations plus any memoized values that were
-- recorded as depending on `key`.
function envclass:invalidate_memos(key)
self.cached_interpolation = {}
local name_tab = self.memo_keys[key]
if name_tab then
for name, _ in pairs(name_tab) do
self.memos[name] = nil
end
end
end
-- Bind `key` only when no binding already exists anywhere in the chain.
function envclass:set_default(key, value)
  if self:has_key(key) then
    return
  end
  self:set(key, value)
end
-- Bulk set_default(): apply every key/value pair from `defaults`.
function envclass:set_default_many(defaults)
  for key, value in pairs(defaults) do
    self:set_default(key, value)
  end
end
-- Bind `key` to `value`. Strings become single-element lists (empty strings
-- become empty lists); tables are validated and copied. Invalidates any
-- memos depending on the key.
function envclass:set(key, value)
  self:invalidate_memos(key)
  -- FIX: check the type before calling key:len(), so a non-string key gets
  -- the intended assertion message instead of an index error.
  assert(type(key) == "string", "key must be a string")
  assert(key:len() > 0, "key must not be empty")
  if type(value) == "string" then
    if value:len() > 0 then
      self.vars[key] = { value }
    else
      -- let empty strings make empty tables
      self.vars[key] = {}
    end
  elseif type(value) == "table" then
    -- FIXME: should filter out empty values
    for _, v in ipairs(value) do
      -- FIX: was `if not type(v) == "string"`, which parses as
      -- `(not type(v)) == "string"` and is always false, so this
      -- validation never ran.
      if type(v) ~= "string" then
        error("key " .. key .. "'s table value contains non-string value " .. tostring(v))
      end
    end
    self.vars[key] = util.clone_array(value)
  else
    error("key " .. key .. "'s value is neither table nor string: " .. tostring(value))
  end
end
-- Return this environment's id. NOTE(review): `self.id` is never assigned
-- in this module -- presumably set externally by callers; confirm.
function envclass:get_id()
return self.id
end
-- Get `key`'s value as a single space-joined string, consulting ancestors.
-- Falls back to `default` when supplied; otherwise raises.
function envclass:get(key, default)
local v = self.vars[key]
if v then
return table.concat(v, " ")
elseif self.parent then
return self.parent:get(key, default)
elseif default then
return default
else
error(string.format("key '%s' not present in environment", key))
end
end
-- Get `key`'s value as the underlying list, consulting ancestors. Falls
-- back to `default` when supplied; otherwise raises.
function envclass:get_list(key, default)
local v = self.vars[key]
if v then
return v -- FIXME: this should be immutable from the outside
elseif self.parent then
return self.parent:get_list(key, default)
elseif default then
return default
elseif not key then
error("nil key is not allowed")
else
error(string.format("key '%s' not present in environment", key))
end
end
-- Return the parent environment, or nil for a root environment.
function envclass:get_parent()
return self.parent
end
-- Expand $(VAR) references in `str` against this environment. The plain
-- interpolation result is cached per input string; when extra `vars` are
-- given, a second uncached pass applies them on top of the cached result.
function envclass:interpolate(str, vars)
local cached = self.cached_interpolation[str]
if not cached then
cached = nenv.interpolate(str, self)
self.cached_interpolation[str] = cached
end
if vars then
return nenv.interpolate(cached, self, vars)
else
return cached
end
end
-- Module-level convenience wrapper around envclass:create().
function create(parent, assignments, obj)
return envclass:create(parent, assignments, obj)
end
-- Remember that memo `name` depends on variable `key`, so invalidating the
-- key can later drop the memo.
function envclass:record_memo_var(key, name)
  local names = self.memo_keys[key]
  if names == nil then
    names = {}
    self.memo_keys[key] = names
  end
  names[name] = true
end
-- Compute-and-cache: return memo `name`, computing it with `fn` on first
-- use and recording its dependency on variable `key`.
function envclass:memoize(key, name, fn)
local memo = self.memos[name]
if not memo then
self:record_memo_var(key, name)
memo = fn()
self.memos[name] = memo
end
return memo
end
-- Look up an external (process) environment override for `key`, walking
-- this environment and its ancestors, then falling back to the real
-- process environment.
function envclass:get_external_env_var(key)
  local chain = self
  while chain do
    -- FIX: previously read `self.external_vars` here, so the walk re-read
    -- the same table every iteration and never consulted the ancestors.
    local t = chain.external_vars
    if t then
      local v = t[key]
      if v then return v end
    end
    chain = chain.parent
  end
  return os.getenv(key)
end
-- Override an external (process) environment variable for this environment.
function envclass:set_external_env_var(key, value)
  if self.external_vars == nil then
    self.external_vars = {}
  end
  self.external_vars[key] = value
end
-- Queue a function on this environment to run from run_setup_functions().
function envclass:add_setup_function(fn)
  local funcs = self.setup_funcs
  if funcs == nil then
    funcs = {}
    self.setup_funcs = funcs
  end
  funcs[#funcs + 1] = fn
end
-- Run every globally registered setup function, then every setup function
-- registered on this environment and its ancestors, passing `self` to each.
function envclass:run_setup_functions()
  for _, func in ipairs(global_setup) do
    func(self)
  end
  -- FIX: removed a stray `t = self.setup_funcs` that leaked a global and
  -- was never used.
  local chain = self
  while chain do
    for _, func in util.nil_ipairs(chain.setup_funcs) do
      func(self)
    end
    chain = chain.parent
  end
end
-- Register a setup function that runs for every environment
-- (see envclass:run_setup_functions).
function add_global_setup(fn)
  global_setup[#global_setup + 1] = fn
end
-- True when `datum` is an environment created by this module
-- (identified by its metatable).
function is_environment(datum)
  return getmetatable(datum) == envclass
end

View File

@ -0,0 +1,11 @@
module(..., package.seeall)
-- Host-level defaults for a Unix-like host: run .NET tools through mono,
-- no executable suffix, '.so' shared libraries, POSIX cp/ln file commands.
-- NOTE(review): the exact host OS this file targets is not visible from
-- this chunk - confirm against the filename.
function apply_host(env)
  env:set_many {
    ["DOTNETRUN"] = "mono ",
    ["HOSTPROGSUFFIX"] = "",
    ["HOSTSHLIBSUFFIX"] = ".so",
    ["_COPY_FILE"] = "cp -f $(<) $(@)",
    ["_HARDLINK_FILE"] = "ln -f $(<) $(@)",
  }
end

View File

@ -0,0 +1,11 @@
module(..., package.seeall)
-- Host-level defaults for a Unix-like host: run .NET tools through mono,
-- no executable suffix, '.so' shared libraries, POSIX cp/ln file commands.
-- NOTE(review): the exact host OS this file targets is not visible from
-- this chunk - confirm against the filename.
function apply_host(env)
  env:set_many {
    ["DOTNETRUN"] = "mono ",
    ["HOSTPROGSUFFIX"] = "",
    ["HOSTSHLIBSUFFIX"] = ".so",
    ["_COPY_FILE"] = "cp -f $(<) $(@)",
    ["_HARDLINK_FILE"] = "ln -f $(<) $(@)",
  }
end

View File

@ -0,0 +1,11 @@
module(..., package.seeall)
-- Host-level defaults for a Darwin/macOS host: run .NET tools through mono,
-- no executable suffix, '.dylib' shared libraries, POSIX cp/ln commands.
function apply_host(env)
  env:set_many {
    ["DOTNETRUN"] = "mono ",
    ["HOSTPROGSUFFIX"] = "",
    ["HOSTSHLIBSUFFIX"] = ".dylib",
    ["_COPY_FILE"] = "cp -f $(<) $(@)",
    ["_HARDLINK_FILE"] = "ln -f $(<) $(@)",
  }
end

View File

@ -0,0 +1,11 @@
module(..., package.seeall)
-- Host-level defaults for a Unix-like host: run .NET tools through mono,
-- no executable suffix, '.so' shared libraries, POSIX cp/ln file commands.
-- NOTE(review): the exact host OS this file targets is not visible from
-- this chunk - confirm against the filename.
function apply_host(env)
  env:set_many {
    ["DOTNETRUN"] = "mono ",
    ["HOSTPROGSUFFIX"] = "",
    ["HOSTSHLIBSUFFIX"] = ".so",
    ["_COPY_FILE"] = "cp -f $(<) $(@)",
    ["_HARDLINK_FILE"] = "ln -f $(<) $(@)",
  }
end

View File

@ -0,0 +1,11 @@
module(..., package.seeall)
-- Host-level defaults for a Windows host: .NET assemblies run directly,
-- '.exe' executables, '.dll' shared libraries, cmd.exe 'copy' commands.
-- NOTE(review): 'copy /f' does not create a hard link on Windows
-- (mklink /H or fsutil hardlink would) - confirm the intent here.
function apply_host(env)
  env:set_many {
    ["DOTNETRUN"] = "",
    ["HOSTPROGSUFFIX"] = ".exe",
    ["HOSTSHLIBSUFFIX"] = ".dll",
    ["_COPY_FILE"] = "copy $(<) $(@)",
    ["_HARDLINK_FILE"] = "copy /f $(<) $(@)",
  }
end

View File

@ -0,0 +1,864 @@
module(..., package.seeall)
local native = require "tundra.native"
local nodegen = require "tundra.nodegen"
local path = require "tundra.path"
local util = require "tundra.util"
-- Line terminator used in all generated files (Windows CRLF).
-- NOTE(review): deliberately not 'local' - presumably shared with hook
-- modules; confirm before localizing.
LF = '\r\n'
local UTF_HEADER = '\239\187\191' -- byte mark EF BB BF
-- Solution format version / VS year. Defaults match VS2012 and are
-- overridden by setup() at the bottom of this file.
local VERSION_NUMBER = "12.00"
local VERSION_YEAR = "2012"
-- Optional generator hooks (global_properties, pre_sources, post_sources)
-- installed via setup().
local HOOKS = {}
local msvc_generator = {}
msvc_generator.__index = msvc_generator
-- Unit keywords that become projects in the generated solution.
local project_types = util.make_lookup_table {
  "Program", "SharedLibrary", "StaticLibrary", "CSharpExe", "CSharpLib", "ObjGroup",
}
local toplevel_stuff = util.make_lookup_table {
  ".exe", ".lib", ".dll",
}
-- Extensions excluded when scanning DAG inputs/outputs for source files.
local binary_extension = util.make_lookup_table {
  ".exe", ".lib", ".dll", ".pdb", ".res", ".obj", ".o", ".a",
}
-- Extensions treated as headers (collected from declarations, not the DAG).
local header_exts = util.make_lookup_table {
  ".h", ".hpp", ".hh", ".inl",
}
-- Walk a DAG collecting source files into `sources`/`generated` lookup
-- tables, following dependency edges but stopping whenever a dependency
-- belongs to a different top-level unit (present in dag_lut).
local function get_sources(dag, sources, generated, level, dag_lut)
  for _, output in util.nil_ipairs(dag.outputs) do
    if not binary_extension[path.get_extension(output)] then
      generated[output] = true
      -- generated headers must be listed as sources as well
      sources[output] = true
    end
  end
  for _, input in util.nil_ipairs(dag.inputs) do
    if not binary_extension[path.get_extension(input)] then
      sources[input] = true
    end
  end
  for _, dep in util.nil_ipairs(dag.deps) do
    -- don't descend into other top-level DAGs
    if not dag_lut[dep] then
      get_sources(dep, sources, generated, level + 1, dag_lut)
    end
  end
end
-- Derive a stable, uppercase GUID string (8-4-4-4-12 form) from `data`
-- by slicing the digest produced by native.digest_guid.
function get_guid_string(data)
  local sha1 = native.digest_guid(data)
  local guid = string.format("%s-%s-%s-%s-%s",
    sha1:sub(1, 8), sha1:sub(9, 12), sha1:sub(13, 16),
    sha1:sub(17, 20), sha1:sub(21, 32))
  assert(#guid == 36)
  return guid:upper()
end
-- Collect header files (by extension) declared in `unit` into source_lut,
-- recursing into dependencies that are not themselves top-level units.
-- Headers are not part of the DAG, so they must come from the declarations.
local function get_headers(unit, source_lut, dag_lut, name_to_dags)
  local src_dir = ''
  if not unit.Decl then
    -- Ignore ExternalLibrary and similar that have no data.
    return
  end
  if unit.Decl.SourceDir then
    src_dir = unit.Decl.SourceDir .. '/'
  end
  for _, src in util.nil_ipairs(nodegen.flatten_list('*-*-*-*', unit.Decl.Sources)) do
    if type(src) == "string" then
      local ext = path.get_extension(src)
      if header_exts[ext] then
        local full_path = path.normalize(src_dir .. src)
        source_lut[full_path] = true
      end
    end
  end
  -- A dependency counts as top-level when it names a known unit (string)
  -- or one of its DAG nodes is a top-level node.
  local function toplevel(u)
    if type(u) == "string" then
      return type(name_to_dags[u]) ~= "nil"
    end
    for _, dag in pairs(u.Decl.__DagNodes) do
      if dag_lut[dag] then
        return true
      end
    end
    return false
  end
  -- Repeat for dependencies ObjGroups
  for _, dep in util.nil_ipairs(nodegen.flatten_list('*-*-*-*', unit.Decl.Depends)) do
    if not toplevel(dep) then
      -- BUG FIX: name_to_dags was previously dropped here, so toplevel()
      -- indexed a nil table for string dependencies during recursion.
      get_headers(dep, source_lut, dag_lut, name_to_dags)
    end
  end
end
-- Decorate `data` in place as a "meta" project (build-system helper
-- projects grouped under the "Build System Meta" solution folder) and
-- return it.
local function make_meta_project(base_dir, data)
  data.Guid = get_guid_string(data.Name)
  data.IdeGenerationHints = { Msvc = { SolutionFolder = "Build System Meta" } }
  data.IsMeta = true
  data.RelativeFilename = data.Name .. ".vcxproj"
  data.Filename = base_dir .. data.RelativeFilename
  data.Type = "meta"
  data.Sources = data.Sources or {}
  return data
end
-- Build a quoted tundra invocation rooted at the current directory,
-- with `args` appended verbatim.
local function tundra_cmdline(args)
  return string.format('"%s" -C "%s" %s', TundraExePath, native.getcwd(), args)
end
-- Command line that re-runs the IDE generator; used by the
-- "Regenerate Solutions and Projects" meta project.
local function project_regen_commandline(ide_script)
  return tundra_cmdline("-g " .. ide_script)
end
-- Build the solution/project data model from the raw unit list.
-- Returns (solutions, projects): an array of solution descriptors
-- (Filename, Projects, ExternalProjects, BuildSolutionProject) and the
-- full project array including generated meta projects.
local function make_project_data(units_raw, env, proj_extension, hints, ide_script)
  -- Filter out stuff we don't care about.
  local units = util.filter(units_raw, function (u)
    return u.Decl.Name and project_types[u.Keyword]
  end)
  local base_dir = hints.MsvcSolutionDir and (hints.MsvcSolutionDir .. '\\') or env:interpolate('$(OBJECTROOT)$(SEP)')
  native.mkdir(base_dir)
  local project_by_name = {}
  local all_sources = {}
  local dag_node_lut = {} -- lookup table of all named, top-level DAG nodes
  local name_to_dags = {} -- table mapping unit name to array of dag nodes (for configs)
  -- Map out all top-level DAG nodes
  for _, unit in ipairs(units) do
    local decl = unit.Decl
    local dag_nodes = assert(decl.__DagNodes, "no dag nodes for " .. decl.Name)
    for build_id, dag_node in pairs(dag_nodes) do
      dag_node_lut[dag_node] = unit
      local array = name_to_dags[decl.Name]
      if not array then
        array = {}
        name_to_dags[decl.Name] = array
      end
      array[#array + 1] = dag_node
    end
  end
  -- Lazily create (and memoize) the output project record for `name`.
  local function get_output_project(name)
    if not project_by_name[name] then
      local relative_fn = name .. proj_extension
      project_by_name[name] = {
        Name = name,
        Sources = {},
        RelativeFilename = relative_fn,
        Filename = base_dir .. relative_fn,
        Guid = get_guid_string(name),
        BuildByDefault = hints.BuildAllByDefault,
      }
    end
    return project_by_name[name]
  end
  -- Sort units based on dependency complexity. We want to visit the leaf nodes
  -- first so that any source file references are picked up as close to the
  -- bottom of the dependency chain as possible.
  local unit_weights = {}
  for _, unit in ipairs(units) do
    local decl = unit.Decl
    local stack = { }
    for _, dag in pairs(decl.__DagNodes) do
      stack[#stack + 1] = dag
    end
    local weight = 0
    while #stack > 0 do
      local node = table.remove(stack)
      if dag_node_lut[node] then
        weight = weight + 1
      end
      for _, dep in util.nil_ipairs(node.deps) do
        stack[#stack + 1] = dep
      end
    end
    unit_weights[unit] = weight
  end
  table.sort(units, function (a, b)
    return unit_weights[a] < unit_weights[b]
  end)
  -- Keep track of what source files have already been grabbed by other projects.
  local grabbed_sources = {}
  for _, unit in ipairs(units) do
    local decl = unit.Decl
    local name = decl.Name
    local source_lut = {}
    local generated_lut = {}
    for build_id, dag_node in pairs(decl.__DagNodes) do
      get_sources(dag_node, source_lut, generated_lut, 0, dag_node_lut)
    end
    -- Explicitly add all header files too as they are not picked up from the DAG
    -- Also pick up headers from non-toplevel DAGs we're depending on
    get_headers(unit, source_lut, dag_node_lut, name_to_dags)
    -- Figure out which project should get this data.
    local output_name = name
    local ide_hints = unit.Decl.IdeGenerationHints
    if ide_hints then
      if ide_hints.OutputProject then
        output_name = ide_hints.OutputProject
      end
    end
    local proj = get_output_project(output_name)
    if output_name == name then
      -- This unit is the real thing for this project, not something that's
      -- just being merged into it (like an ObjGroup). Set some more attributes.
      proj.IdeGenerationHints = ide_hints
      proj.DagNodes = decl.__DagNodes
      proj.Unit = unit
    end
    -- First project to claim a source file keeps it; avoids duplicates.
    for src, _ in pairs(source_lut) do
      local norm_src = path.normalize(src)
      if not grabbed_sources[norm_src] then
        grabbed_sources[norm_src] = unit
        local is_generated = generated_lut[src]
        proj.Sources[#proj.Sources+1] = {
          Path = norm_src,
          Generated = is_generated,
        }
      end
    end
  end
  -- Get all accessed Lua files
  local accessed_lua_files = util.table_keys(get_accessed_files())
  -- Filter out the ones that belong to this build (exclude ones coming from Tundra)
  local function is_non_tundra_lua_file(p)
    return not path.is_absolute(p)
  end
  local function make_src_node(p)
    return { Path = path.normalize(p) }
  end
  local source_list = util.map(util.filter(accessed_lua_files, is_non_tundra_lua_file), make_src_node)
  local solution_hints = hints.MsvcSolutions
  if not solution_hints then
    print("No IdeGenerationHints.MsvcSolutions specified - using defaults")
    solution_hints = {
      ['tundra-generated.sln'] = {}
    }
  end
  local projects = util.table_values(project_by_name)
  local vanilla_projects = util.clone_array(projects)
  local solutions = {}
  -- Create meta project to regenerate solutions/projects. Added to every solution.
  local regen_meta_proj = make_meta_project(base_dir, {
    Name = "00-Regenerate-Projects",
    FriendlyName = "Regenerate Solutions and Projects",
    BuildCommand = project_regen_commandline(ide_script),
  })
  projects[#projects + 1] = regen_meta_proj
  for name, data in pairs(solution_hints) do
    local sln_projects
    local ext_projects = {}
    if data.Projects then
      sln_projects = {}
      for _, pname in ipairs(data.Projects) do
        local pp = project_by_name[pname]
        if not pp then
          errorf("can't find project %s for inclusion in %s -- check your MsvcSolutions data", pname, name)
        end
        sln_projects[#sln_projects + 1] = pp
      end
    else
      -- All the projects (that are not meta)
      sln_projects = util.clone_array(vanilla_projects)
    end
    for _, ext in util.nil_ipairs(data.ExternalProjects) do
      ext_projects[#ext_projects + 1] = ext
    end
    -- Per-solution "Build This Solution" meta project that invokes Tundra
    -- for every member project.
    local meta_proj = make_meta_project(base_dir, {
      Name = "00-tundra-" .. path.drop_suffix(name),
      FriendlyName = "Build This Solution",
      BuildByDefault = true,
      Sources = source_list,
      BuildProjects = util.clone_array(sln_projects),
    })
    sln_projects[#sln_projects + 1] = regen_meta_proj
    sln_projects[#sln_projects + 1] = meta_proj
    projects[#projects + 1] = meta_proj
    solutions[#solutions + 1] = {
      Filename = base_dir .. name,
      Projects = sln_projects,
      ExternalProjects = ext_projects,
      BuildSolutionProject = meta_proj,
    }
  end
  return solutions, projects
end
-- Maps header extensions to their MSBuild item tag; everything else
-- falls back to 'ClCompile'.
local cl_tags = {
  ['.h'] = 'ClInclude',
  ['.hh'] = 'ClInclude',
  ['.hpp'] = 'ClInclude',
  ['.inl'] = 'ClInclude',
}
-- Read a file's entire contents in binary mode; returns the empty string
-- when the file cannot be opened.
local function slurp_file(fn)
  local fh = io.open(fn, 'rb')
  if not fh then
    return ''
  end
  local data = fh:read("*all")
  fh:close()
  return data
end
-- Atomically replace old_fn with new_fn, but only when the contents
-- actually differ; otherwise the temp file is discarded. Keeps file
-- timestamps stable so IDEs don't reload unchanged projects.
local function replace_if_changed(new_fn, old_fn)
  if slurp_file(old_fn) == slurp_file(new_fn) then
    os.remove(new_fn)
    return
  end
  printf("Updating %s", old_fn)
  os.remove(old_fn)
  os.rename(new_fn, old_fn)
end
-- Write a Visual Studio .sln file to `fn` (via a .tmp file swapped in only
-- on change). Emits project entries, solution folders, per-configuration
-- mappings and folder nesting. External projects are made to depend on the
-- solution's "Build This Solution" meta project.
function msvc_generator:generate_solution(fn, projects, ext_projects, solution)
  local sln = io.open(fn .. '.tmp', 'wb')
  sln:write(UTF_HEADER, LF, "Microsoft Visual Studio Solution File, Format Version ", VERSION_NUMBER, LF, "# Visual Studio ", VERSION_YEAR, LF)
  -- Map folder names to array of projects under that folder
  local sln_folders = {}
  for _, proj in ipairs(projects) do
    local hints = proj.IdeGenerationHints
    local msvc_hints = hints and hints.Msvc or nil
    local folder = msvc_hints and msvc_hints.SolutionFolder or nil
    if folder then
      local projects = sln_folders[folder] or {}
      projects[#projects + 1] = proj
      sln_folders[folder] = projects
    end
  end
  -- The fixed GUID below is Visual Studio's project-type id for C++ projects.
  for _, proj in ipairs(projects) do
    local name = proj.Name
    local fname = proj.RelativeFilename
    local guid = proj.Guid
    sln:write(string.format('Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "%s", "%s", "{%s}"', name, fname, guid), LF)
    sln:write('EndProject', LF)
  end
  -- Dump external projects. Make them depend on everything in this solution being built by Tundra.
  for _, data in util.nil_ipairs(ext_projects) do
    local guid = data.Guid
    local fname = path.normalize(path.join(native.getcwd(), data.Filename))
    local name = path.get_filename_base(fname)
    sln:write(string.format('Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "%s", "%s", "{%s}"', name, fname, guid), LF)
    local build_sln_proj = solution.BuildSolutionProject
    if build_sln_proj then
      local meta_guid = build_sln_proj.Guid
      sln:write('\tProjectSection(ProjectDependencies) = postProject', LF)
      sln:write('\t\t{', meta_guid,'} = {', meta_guid,'}', LF)
      sln:write('\tEndProjectSection', LF)
    end
    sln:write('EndProject', LF)
  end
  -- Solution folders use VS's "solution folder" project-type GUID.
  for folder_name, _ in pairs(sln_folders) do
    local folder_guid = get_guid_string("folder/" .. folder_name)
    sln:write(string.format('Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "%s", "%s", "{%s}"', folder_name, folder_name, folder_guid), LF)
    sln:write('EndProject', LF)
  end
  sln:write("Global", LF)
  sln:write("\tGlobalSection(SolutionConfigurationPlatforms) = preSolution", LF)
  for _, tuple in ipairs(self.config_tuples) do
    sln:write(string.format('\t\t%s = %s', tuple.MsvcName, tuple.MsvcName), LF)
  end
  sln:write("\tEndGlobalSection", LF)
  sln:write("\tGlobalSection(ProjectConfigurationPlatforms) = postSolution", LF)
  for _, proj in ipairs(projects) do
    for _, tuple in ipairs(self.config_tuples) do
      local leader = string.format('\t\t{%s}.%s.', proj.Guid, tuple.MsvcName)
      sln:write(leader, "ActiveCfg = ", tuple.MsvcName, LF)
      if proj.BuildByDefault then
        sln:write(leader, "Build.0 = ", tuple.MsvcName, LF)
      end
    end
  end
  -- External projects build by default, and after Tundra is done (depends on "Build this solution").
  for _, proj in util.nil_ipairs(ext_projects) do
    for _, tuple in ipairs(self.config_tuples) do
      local leader = string.format('\t\t{%s}.%s.', proj.Guid, tuple.MsvcName)
      sln:write(leader, "ActiveCfg = ", tuple.MsvcName, LF)
      if not proj.Platform or proj.Platform == tuple.MsvcPlatform then
        sln:write(leader, "Build.0 = ", tuple.MsvcName, LF)
      end
    end
  end
  sln:write("\tEndGlobalSection", LF)
  sln:write("\tGlobalSection(SolutionProperties) = preSolution", LF)
  sln:write("\t\tHideSolutionNode = FALSE", LF)
  sln:write("\tEndGlobalSection", LF)
  sln:write("\tGlobalSection(NestedProjects) = preSolution", LF)
  for folder_name, projects in pairs(sln_folders) do
    local folder_guid = get_guid_string("folder/" .. folder_name)
    for _, project in ipairs(projects) do
      sln:write(string.format('\t\t{%s} = {%s}', project.Guid, folder_guid), LF)
    end
  end
  sln:write("\tEndGlobalSection", LF)
  sln:write("EndGlobal", LF)
  sln:close()
  replace_if_changed(fn .. ".tmp", fn)
end
-- Resolve the DAG node matching a config tuple's "<config>-<variant>-<subvariant>"
-- build id. Returns nil when the project has no DAG nodes at all (meta
-- projects); raises when the project has nodes but none match.
local function find_dag_node_for_config(project, tuple)
  local build_id = string.format("%s-%s-%s", tuple.Config.Name, tuple.Variant.Name, tuple.SubVariant)
  local nodes = project.DagNodes
  if not nodes then
    return nil
  end
  if nodes[build_id] then
    return nodes[build_id]
  end
  -- BUG FIX: `nodes` is keyed by build-id strings, so `#nodes` was always
  -- 0 in the error message; count the actual keys instead.
  local available = util.table_keys(nodes)
  errorf("couldn't find config %s for project %s (%d dag nodes) - available: %s",
    build_id, project.Name, #available, table.concat(available, ", "))
end
-- Write a makefile-style .vcxproj for `project` (via a .tmp file swapped
-- in only on change): configurations, NMake build/clean/rebuild command
-- lines invoking Tundra, Intellisense include paths/defines, and the
-- source file list. Meta projects get their custom BuildCommand instead.
-- NOTE(review): several string literals below contain sequences like
-- '\Microsoft' - Lua 5.1 drops the backslash for unknown escapes, which
-- works because $(VCTargetsPath) ends in a backslash; confirm before
-- porting to Lua >= 5.2 where these escapes are errors.
function msvc_generator:generate_project(project, all_projects)
  local fn = project.Filename
  local p = assert(io.open(fn .. ".tmp", 'wb'))
  p:write('<?xml version="1.0" encoding="utf-8"?>', LF)
  p:write('<Project')
  p:write(' DefaultTargets="Build"')
  p:write(' ToolsVersion="4.0"')
  p:write(' xmlns="http://schemas.microsoft.com/developer/msbuild/2003"')
  p:write('>', LF)
  -- List all project configurations
  p:write('\t<ItemGroup Label="ProjectConfigurations">', LF)
  for _, tuple in ipairs(self.config_tuples) do
    p:write('\t\t<ProjectConfiguration Include="', tuple.MsvcName, '">', LF)
    p:write('\t\t\t<Configuration>', tuple.MsvcConfiguration, '</Configuration>', LF)
    p:write('\t\t\t<Platform>', tuple.MsvcPlatform, '</Platform>', LF)
    p:write('\t\t</ProjectConfiguration>', LF)
  end
  p:write('\t</ItemGroup>', LF)
  p:write('\t<PropertyGroup Label="Globals">', LF)
  p:write('\t\t<ProjectGuid>{', project.Guid, '}</ProjectGuid>', LF)
  p:write('\t\t<Keyword>MakeFileProj</Keyword>', LF)
  if project.FriendlyName then
    p:write('\t\t<ProjectName>', project.FriendlyName, '</ProjectName>', LF)
  end
  if HOOKS.global_properties then
    HOOKS.global_properties(p, project)
  end
  p:write('\t</PropertyGroup>', LF)
  p:write('\t<PropertyGroup>', LF)
  if VERSION_YEAR == '2012' then
    p:write('\t\t<_ProjectFileVersion>10.0.30319.1</_ProjectFileVersion>', LF)
  end
  p:write('\t</PropertyGroup>', LF)
  p:write('\t<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />', LF)
  -- Mark all project configurations as makefile-type projects
  for _, tuple in ipairs(self.config_tuples) do
    p:write('\t<PropertyGroup Condition="\'$(Configuration)|$(Platform)\'==\'', tuple.MsvcName, '\'" Label="Configuration">', LF)
    p:write('\t\t<ConfigurationType>Makefile</ConfigurationType>', LF)
    p:write('\t\t<UseDebugLibraries>true</UseDebugLibraries>', LF) -- I have no idea what this setting affects
    if VERSION_YEAR == '2012' then
      p:write('\t\t<PlatformToolset>v110</PlatformToolset>', LF) -- I have no idea what this setting affects
    elseif VERSION_YEAR == '2013' then
      p:write('\t\t<PlatformToolset>v120</PlatformToolset>', LF) -- I have no idea what this setting affects
    end
    p:write('\t</PropertyGroup>', LF)
  end
  p:write('\t<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />', LF)
  -- Per-configuration NMake command lines and Intellisense settings.
  for _, tuple in ipairs(self.config_tuples) do
    p:write('\t<PropertyGroup Condition="\'$(Configuration)|$(Platform)\'==\'', tuple.MsvcName, '\'">', LF)
    local dag_node = find_dag_node_for_config(project, tuple)
    local include_paths, defines
    if dag_node then
      local env = dag_node.src_env
      local paths = util.map(env:get_list("CPPPATH"), function (p)
        local ip = path.normalize(env:interpolate(p))
        if not path.is_absolute(ip) then
          ip = native.getcwd() .. '\\' .. ip
        end
        return ip
      end)
      include_paths = table.concat(paths, ';')
      local ext_paths = env:get_external_env_var('INCLUDE')
      if ext_paths then
        include_paths = include_paths .. ';' .. ext_paths
      end
      defines = env:interpolate("$(CPPDEFS:j;)")
    else
      include_paths = ''
      defines = ''
    end
    local root_dir = native.getcwd()
    local build_id = string.format("%s-%s-%s", tuple.Config.Name, tuple.Variant.Name, tuple.SubVariant)
    local base = "\"" .. TundraExePath .. "\" -C \"" .. root_dir .. "\" "
    local build_cmd = base .. build_id
    local clean_cmd = base .. "--clean " .. build_id
    local rebuild_cmd = base .. "--rebuild " .. build_id
    if project.BuildCommand then
      build_cmd = project.BuildCommand
      clean_cmd = ""
      rebuild_cmd = ""
    elseif not project.IsMeta then
      build_cmd = build_cmd .. " " .. project.Name
      clean_cmd = clean_cmd .. " " .. project.Name
      rebuild_cmd = rebuild_cmd .. " " .. project.Name
    else
      local all_projs_str = table.concat(
        util.map(assert(project.BuildProjects), function (p) return p.Name end), ' ')
      build_cmd = build_cmd .. " " .. all_projs_str
      clean_cmd = clean_cmd .. " " .. all_projs_str
      rebuild_cmd = rebuild_cmd .. " " .. all_projs_str
    end
    p:write('\t\t<NMakeBuildCommandLine>', build_cmd, '</NMakeBuildCommandLine>', LF)
    p:write('\t\t<NMakeOutput></NMakeOutput>', LF)
    p:write('\t\t<NMakeCleanCommandLine>', clean_cmd, '</NMakeCleanCommandLine>', LF)
    p:write('\t\t<NMakeReBuildCommandLine>', rebuild_cmd, '</NMakeReBuildCommandLine>', LF)
    p:write('\t\t<NMakePreprocessorDefinitions>', defines, ';$(NMakePreprocessorDefinitions)</NMakePreprocessorDefinitions>', LF)
    p:write('\t\t<NMakeIncludeSearchPath>', include_paths, ';$(NMakeIncludeSearchPath)</NMakeIncludeSearchPath>', LF)
    p:write('\t\t<NMakeForcedIncludes>$(NMakeForcedIncludes)</NMakeForcedIncludes>', LF)
    p:write('\t</PropertyGroup>', LF)
  end
  if HOOKS.pre_sources then
    HOOKS.pre_sources(p, project)
  end
  -- Emit list of source files
  p:write('\t<ItemGroup>', LF)
  for _, record in ipairs(project.Sources) do
    local path_str = assert(record.Path)
    if not path.is_absolute(path_str) then
      path_str = native.getcwd() .. '\\' .. path_str
    end
    local ext = path.get_extension(path_str)
    local cl_tag = cl_tags[ext] or 'ClCompile'
    p:write('\t\t<', cl_tag,' Include="', path_str, '" />', LF)
  end
  p:write('\t</ItemGroup>', LF)
  local post_src_hook = HOOKS.post_sources
  if post_src_hook then
    post_src_hook(p, project)
  end
  p:write('\t<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />', LF)
  if VERSION_YEAR == "2012" then
    -- Import helper msbuild stuff to make build aborting work propertly in VS2012
    local xml = path.normalize(TundraScriptDir .. '/tundra/ide/msvc-rules.xml')
    p:write('\t<Import Project="', xml, '" />', LF)
  end
  p:write('</Project>', LF)
  p:close()
  replace_if_changed(fn .. ".tmp", fn)
end
-- Compute the longest common directory prefix (with trailing backslash)
-- shared by all non-absolute source paths; returns '' when there is none.
local function get_common_dir(sources)
  local dir_tokens = {}
  for _, src in ipairs(sources) do
    -- NOTE: this local shadows the module-level 'path' module, which is
    -- why the fully-qualified 'tundra.path' is used below.
    local path = assert(src.Path)
    if not tundra.path.is_absolute(path) then
      local subdirs = {}
      -- Splits on backslashes. The literal relies on Lua 5.1's lenient
      -- escape handling ('\]' collapses to ']'); the effective pattern
      -- matches runs of non-backslash chars followed by a backslash.
      for subdir in path:gmatch("([^\\\]+)\\") do
        subdirs[#subdirs + 1] = subdir
      end
      if #dir_tokens == 0 then
        dir_tokens = subdirs
      else
        -- Trim dir_tokens down to the shared prefix with this path.
        for i = 1, #dir_tokens do
          if dir_tokens[i] ~= subdirs[i] then
            while #dir_tokens >= i do
              table.remove(dir_tokens)
            end
            break
          end
        end
      end
    end
  end
  local result = table.concat(dir_tokens, '\\')
  if #result > 0 then
    result = result .. '\\'
  end
  return result
end
-- Write the .vcxproj.filters companion file, mapping each source file into
-- a filter ("folder") derived from its directory relative to the common
-- root; generated files all land under 'Generated Files'.
function msvc_generator:generate_project_filters(project)
  local fn = project.Filename .. ".filters"
  local p = assert(io.open(fn .. ".tmp", 'wb'))
  p:write('<?xml version="1.0" encoding="Windows-1252"?>', LF)
  p:write('<Project')
  p:write(' ToolsVersion="4.0"')
  p:write(' xmlns="http://schemas.microsoft.com/developer/msbuild/2003"')
  p:write('>', LF)
  local common_dir = get_common_dir(util.filter(project.Sources, function (s) return not s.Generated end))
  local common_dir_gen = get_common_dir(util.filter(project.Sources, function (s) return s.Generated end))
  local filters = {}
  local sources = {}
  -- Mangle source filenames, and find which filters need to be created
  for _, record in ipairs(project.Sources) do
    local fn = record.Path
    local common_start = record.Generated and common_dir_gen or common_dir
    if fn:find(common_start, 1, true) then
      fn = fn:sub(#common_start+1)
    end
    local dir, filename = path.split(fn)
    if dir == '.' then
      dir = nil
    end
    local abs_path = record.Path
    if not path.is_absolute(abs_path) then
      abs_path = native.getcwd() .. '\\' .. abs_path
    end
    if record.Generated then
      dir = 'Generated Files'
    end
    sources[#sources + 1] = {
      FullPath = abs_path,
      Directory = dir,
    }
    -- Register filter and all its parents
    while dir and dir ~= '.' do
      filters[dir] = true
      dir, _ = path.split(dir)
    end
  end
  -- Emit list of filters
  p:write('\t<ItemGroup>', LF)
  for filter_name, _ in pairs(filters) do
    if filter_name ~= "" then
      -- BUG FIX: filter_guid previously leaked as a global (missing 'local').
      local filter_guid = get_guid_string(filter_name)
      p:write('\t\t<Filter Include="', filter_name, '">', LF)
      p:write('\t\t\t<UniqueIdentifier>{', filter_guid, '}</UniqueIdentifier>', LF)
      p:write('\t\t</Filter>', LF)
    end
  end
  p:write('\t</ItemGroup>', LF)
  -- Emit list of source files
  p:write('\t<ItemGroup>', LF)
  for _, source in ipairs(sources) do
    local ext = path.get_extension(source.FullPath)
    local cl_tag = cl_tags[ext] or 'ClCompile'
    if not source.Directory then
      p:write('\t\t<', cl_tag, ' Include="', source.FullPath, '" />', LF)
    else
      p:write('\t\t<', cl_tag, ' Include="', source.FullPath, '">', LF)
      p:write('\t\t\t<Filter>', source.Directory, '</Filter>', LF)
      p:write('\t\t</', cl_tag, '>', LF)
    end
  end
  p:write('\t</ItemGroup>', LF)
  p:write('</Project>', LF)
  p:close()
  replace_if_changed(fn .. ".tmp", fn)
end
-- Write the .vcxproj.user file with local-debugger settings pointing at
-- the first '.exe' output of each configuration. An existing file is left
-- untouched so user customizations are never clobbered.
function msvc_generator:generate_project_user(project)
  local fn = project.Filename .. ".user"
  -- Don't overwrite user settings
  do
    local p, err = io.open(fn, 'rb')
    if p then
      p:close()
      return
    end
  end
  local p = assert(io.open(fn, 'wb'))
  p:write('<?xml version="1.0" encoding="utf-8"?>', LF)
  p:write('<Project')
  p:write(' ToolsVersion="4.0"')
  p:write(' xmlns="http://schemas.microsoft.com/developer/msbuild/2003"')
  p:write('>', LF)
  for _, tuple in ipairs(self.config_tuples) do
    local dag_node = find_dag_node_for_config(project, tuple)
    if dag_node then
      local exe = nil
      -- NOTE(review): matches '.exe' anywhere in the output path, not only
      -- as a suffix - confirm whether '%.exe$' was intended.
      for _, output in util.nil_ipairs(dag_node.outputs) do
        if output:match("%.exe") then
          exe = output
          break
        end
      end
      if exe then
        p:write('\t<PropertyGroup Condition="\'$(Configuration)|$(Platform)\'==\'', tuple.MsvcName, '\'">', LF)
        p:write('\t\t<LocalDebuggerCommand>', native.getcwd() .. '\\' .. exe, '</LocalDebuggerCommand>', LF)
        p:write('\t\t<DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>', LF)
        p:write('\t\t<LocalDebuggerWorkingDirectory>', native.getcwd(), '</LocalDebuggerWorkingDirectory>', LF)
        p:write('\t</PropertyGroup>', LF)
      end
    end
  end
  p:write('</Project>', LF)
  p:close()
end
-- Entry point called by nodegen: maps Tundra config tuples to MSVC
-- configuration/platform names (honoring IdeGenerationHints.Msvc mapping
-- tables, defaulting the platform to Win32 with a warning), then emits
-- every solution, project, filters file and user file.
function msvc_generator:generate_files(ngen, config_tuples, raw_nodes, env, default_names, hints, ide_script)
  assert(config_tuples and #config_tuples > 0)
  if not hints then
    hints = {}
  end
  local complained_mappings = {}
  self.msvc_platforms = {}
  local msvc_hints = hints.Msvc or {}
  local variant_mappings = msvc_hints.VariantMappings or {}
  local platform_mappings = msvc_hints.PlatformMappings or {}
  local full_mappings = msvc_hints.FullMappings or {}
  for _, tuple in ipairs(config_tuples) do
    local build_id = string.format("%s-%s-%s", tuple.Config.Name, tuple.Variant.Name, tuple.SubVariant)
    -- Mapping precedence: FullMappings > VariantMappings (plain, then
    -- with subvariant suffix) > raw variant name.
    if full_mappings[build_id] then
      local m = full_mappings[build_id]
      tuple.MsvcConfiguration = assert(m.Config)
      tuple.MsvcPlatform = assert(m.Platform)
    elseif variant_mappings[tuple.Variant.Name] then
      tuple.MsvcConfiguration = variant_mappings[tuple.Variant.Name]
    elseif variant_mappings[tuple.Variant.Name .. "-" .. tuple.SubVariant] then
      tuple.MsvcConfiguration = variant_mappings[tuple.Variant.Name .. "-" .. tuple.SubVariant]
    else
      tuple.MsvcConfiguration = tuple.Variant.Name
    end
    -- Use IdeGenerationHints.Msvc.PlatformMappings table to map tundra
    -- configurations to MSVC platform names. Note that this isn't a huge deal
    -- for building stuff as Tundra doesn't care about this setting. But it
    -- might influence the choice of debugger and affect include paths for
    -- things like Intellisense that certain users may care about.
    if not tuple.MsvcPlatform then
      tuple.MsvcPlatform = platform_mappings[tuple.Config.Name]
    end
    -- If we didn't find anything, warn and then default to Win32, which VS
    -- will always accept (or so one would assume)
    if not tuple.MsvcPlatform then
      tuple.MsvcPlatform = "Win32"
      if not complained_mappings[tuple.Config.Name] then
        printf("warning: No VS platform mapping for %s, mapping to Win32", tuple.Config.Name)
        print("(Add one to IdeGenerationHints.Msvc.PlatformMappings to override)")
        complained_mappings[tuple.Config.Name] = true
      end
    end
    tuple.MsvcName = tuple.MsvcConfiguration .. "|" .. tuple.MsvcPlatform
    self.msvc_platforms[tuple.MsvcPlatform] = true
  end
  self.config_tuples = config_tuples
  printf("Generating Visual Studio projects for %d configurations/variants", #config_tuples)
  -- Figure out where we're going to store the projects
  local solutions, projects = make_project_data(raw_nodes, env, ".vcxproj", hints, ide_script)
  local proj_lut = {}
  for _, p in ipairs(projects) do
    proj_lut[p.Name] = p
  end
  for _, sln in pairs(solutions) do
    self:generate_solution(sln.Filename, sln.Projects, sln.ExternalProjects, sln)
  end
  for _, proj in ipairs(projects) do
    self:generate_project(proj, projects)
    self:generate_project_filters(proj)
    self:generate_project_user(proj)
  end
end
-- Configure the solution format version, VS year and optional generator
-- hooks, then register this module as the IDE backend with nodegen.
function setup(version_short, version_year, hooks)
  VERSION_NUMBER = version_short
  VERSION_YEAR = version_year
  HOOKS = hooks or HOOKS
  nodegen.set_ide_backend(function(...)
    local state = setmetatable({}, msvc_generator)
    state:generate_files(...)
  end)
end

View File

@ -0,0 +1,173 @@
<!--
This file is an awful hack to create a nmake-like builder for tundra that
doesn't just kill the tool when you cancel the build.
-->
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<UsingTask TaskName="VCMessage" AssemblyName="Microsoft.Build.CppTasks.Common, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a" />
<UsingTask TaskName="SofterExec" TaskFactory="CodeTaskFactory" AssemblyFile="$(MSBuildToolsPath)\Microsoft.Build.Tasks.v4.0.dll">
<ParameterGroup>
<Command Required="true" />
</ParameterGroup>
<Task>
<Reference Include="Microsoft.Build.Utilities.v4.0" />
<Reference Include="Microsoft.Build.Tasks.v4.0" />
<Code Type="Class" Language="cs">
<![CDATA[
using System;
using Microsoft.Build.Utilities;
using Microsoft.Build.Framework;
using System.Diagnostics;
using System.Runtime.InteropServices;
public class SofterExec : Task, ICancelableTask
{
[DllImport("kernel32.dll", SetLastError=true)]
static extern bool GenerateConsoleCtrlEvent(ConsoleCtrlEvent sigevent, int dwProcessGroupId);
public enum ConsoleCtrlEvent
{
CTRL_C = 0,
CTRL_BREAK = 1,
CTRL_CLOSE = 2,
CTRL_LOGOFF = 5,
CTRL_SHUTDOWN = 6
}
public string Command { get; set; }
private volatile bool m_Cancel = false;
public SofterExec()
{
}
public void Cancel()
{
m_Cancel = true;
}
public override bool Execute()
{
try
{
using (Process p = new Process())
{
p.StartInfo.FileName = "cmd";
p.StartInfo.Arguments = "/c \"" + Command + "\"";
p.StartInfo.UseShellExecute = false;
p.StartInfo.RedirectStandardOutput = true;
p.StartInfo.RedirectStandardError = true;
p.OutputDataReceived += (object sender, DataReceivedEventArgs line) => {
if (line.Data != null)
Log.LogMessageFromText(line.Data, MessageImportance.High);
};
p.ErrorDataReceived += (object sender, DataReceivedEventArgs line) => {
if (line.Data != null)
Log.LogMessageFromText(line.Data, MessageImportance.High);
};
p.Start();
p.BeginOutputReadLine();
p.BeginErrorReadLine();
while (!p.WaitForExit(100))
{
if (m_Cancel)
{
// Keep sending CTRL+C events - sometimes it takes more than one..
GenerateConsoleCtrlEvent(ConsoleCtrlEvent.CTRL_C, 0);
}
}
p.WaitForExit();
return m_Cancel ? false : p.ExitCode == 0;
}
}
catch(Exception e)
{
Console.WriteLine(e);
return false;
}
}
}
]]>
</Code>
</Task>
</UsingTask>
<Target Name="CoreClean">
<VCMessage Code="MSB8005" Type="Warning" Arguments="NMakeCleanCommandLine" Condition="'$(NMakeCleanCommandLine)'==''"/>
<SofterExec Command="$(NMakeCleanCommandLine)" Condition="'$(NMakeCleanCommandLine)'!=''"/>
</Target>
<Target Name="Build" DependsOnTargets="PrepareForNMakeBuild;ResolveReferences;GetTargetPath" Returns="$(NMakeManagedOutput)">
<VCMessage Code="MSB8005" Type="Warning" Arguments="NMakeBuildCommandLine" Condition="'$(NMakeBuildCommandLine)'==''"/>
<SofterExec Command="$(NMakeBuildCommandLine)" Condition="'$(NMakeBuildCommandLine)'!=''"/>
</Target>
<Target Name="Rebuild" DependsOnTargets="PrepareForNMakeBuild;Clean;ResolveReferences;GetTargetPath" Returns="$(NMakeManagedOutput)">
<VCMessage Code="MSB8005" Type="Warning" Arguments="NMakeReBuildCommandLine" Condition="'$(NMakeReBuildCommandLine)'==''"/>
<SofterExec Command="$(NMakeReBuildCommandLine)" Condition="'$(NMakeReBuildCommandLine)'!=''"/>
</Target>
<!-- *******************************************************************************************
GetResolved Native Targets
Since Makefile doesn't import Microsoft.common.targets or microsoft.cppbuild.targets,
it needs to have its own set of project to project reference targets.
******************************************************************************************* -->
<Target Name="GetResolvedLinkObjs" DependsOnTargets="GetNativeTargetPath" Returns="@(NMakeNativeOutput)" />
<Target Name="GetResolvedLinkLibs" DependsOnTargets="GetNativeTargetPath" Returns="@(NMakeNativeOutput)" />
<Target Name="GetResolvedXDCMake" DependsOnTargets="GetNativeTargetPath" Returns="@(NMakeNativeOutput)" />
<Target Name="GetCopyToOutputDirectoryItems" />
<Target Name="SetToGetNativeTargetPath" >
<ItemGroup>
<ProjectReference>
<Targets Condition="'%(Extension)' == '.vcxproj'">GetNativeTargetPath;%(Targets)</Targets>
</ProjectReference>
</ItemGroup>
</Target>
<Target Name="GetNativeTargetPath" Returns="@(NMakeNativeOutput)">
<ItemGroup>
<NMakeNativeOutput Condition="'$(CLRSupport)' == '' or '$(CLRSupport)' == 'false'" Include="$(TargetPath)" />
</ItemGroup>
<ItemGroup>
<NMakeNativeOutput Condition="'@(NMakeNativeOutput)' != ''" >
<FileType Condition="'%(NMakeNativeOutput.Extension)' == '.obj'">obj</FileType>
<FileType Condition="'%(NMakeNativeOutput.Extension)' == '.lib'">lib</FileType>
<FileType Condition="'%(NMakeNativeOutput.Extension)' == '.dll'">dll</FileType>
<FileType Condition="'%(NMakeNativeOutput.Extension)' == '.xdc'">xdc</FileType>
</NMakeNativeOutput>
</ItemGroup>
</Target>
<Target Name="GetTargetPath" Returns="$(NMakeManagedOutput)">
<PropertyGroup>
<NMakeManagedOutput Condition="'$(CLRSupport)' != '' and '$(CLRSupport)' != 'false'">$(TargetPath)</NMakeManagedOutput>
</PropertyGroup>
</Target>
<Target Name="GetNativeManifest" />
<!-- *******************************************************************************************
Property pages
******************************************************************************************* -->
<ItemGroup Condition="'$(UseDefaultPropertyPageSchemas)' != 'false'">
<PropertyPageSchema Include="$(VCTargetsPath)$(LangID)\ProjectItemsSchema.xml" />
<PropertyPageSchema Include="$(VCTargetsPath)$(LangID)\directories.xml" />
<PropertyPageSchema Include="$(VCTargetsPath)$(LangID)\debugger_*.xml" />
<PropertyPageSchema Include="$(VCTargetsPath)$(LangID)\nmake.xml" />
<!-- project only rules -->
<PropertyPageSchema Include="$(VCTargetsPath)$(LangID)\general_makefile.xml">
<Context>Project</Context>
</PropertyPageSchema>
<!-- Property sheet only rules -->
<PropertyPageSchema Include="$(VCTargetsPath)$(LangID)\general_makefile_ps.xml;$(VCTargetsPath)$(LangID)\usermacros.xml">
<Context>PropertySheet</Context>
</PropertyPageSchema>
</ItemGroup>
</Project>

View File

@ -0,0 +1,7 @@
-- Microsoft Visual Studio 2010 Solution/Project file generation
module(..., package.seeall)
local msvc_common = require "tundra.ide.msvc-common"
-- "11.00" is the .sln "Format Version" header emitted for VS2010 solutions.
msvc_common.setup("11.00", "2010")

View File

@ -0,0 +1,7 @@
-- Microsoft Visual Studio 2012 Solution/Project file generation
module(..., package.seeall)
local msvc_common = require "tundra.ide.msvc-common"
-- "12.00" is the .sln "Format Version" header emitted for VS2012 solutions.
msvc_common.setup("12.00", "2012")

View File

@ -0,0 +1,7 @@
-- Microsoft Visual Studio 2013 Solution/Project file generation
module(..., package.seeall)
local msvc_common = require "tundra.ide.msvc-common"
-- VS2013 solutions reuse the "12.00" format version (only the version
-- comment line differs from VS2012).
msvc_common.setup("12.00", "2013")

View File

@ -0,0 +1,735 @@
-- Xcode 3 (works in 4 as well) Workspace/Project file generation
module(..., package.seeall)

-- Project-local modules: path helpers, IDE node generation, utilities and
-- native (C-side) services (digest_guid, getcwd, mkdir).
local path = require "tundra.path"
local nodegen = require "tundra.nodegen"
local util = require "tundra.util"
local native = require "tundra.native"

-- Generator "class" table; methods are attached below and dispatched via
-- __index. (The original declared this local twice back to back; the
-- redundant duplicate declaration has been removed.)
local xcode_generator = {}
xcode_generator.__index = xcode_generator
-- Write an Xcode workspace XML file (`fn`) with one FileRef per generated
-- .xcodeproj. `projects` entries must carry Decl.Name.
function xcode_generator:generate_workspace(fn, projects)
  local sln = assert(io.open(fn, 'wb'))
  sln:write('<?xml version="1.0" encoding="UTF-8"?>\n')
  sln:write('<Workspace\n')
  sln:write('\tversion = "1.0">\n')
  for _, proj in ipairs(projects) do
    -- NOTE(review): the original also computed a `fname` from
    -- proj.RelativeFilename but never used it; only the project name is
    -- referenced in the FileRef. Dead code removed.
    local name = proj.Decl.Name
    sln:write('\t<FileRef\n')
    sln:write('\t\tlocation = "group:', name .. '.xcodeproj">\n')
    sln:write('\t</FileRef>\n')
  end
  sln:write('</Workspace>\n')
  -- The handle was previously leaked; close it so the file is flushed.
  sln:close()
end
-- Unit keywords that map to a real Xcode target in the generated project.
local project_types = util.make_lookup_table {
"Program", "SharedLibrary", "StaticLibrary",
}
-- Absolute path (ending in a separator) to the build output root, built from
-- the current working directory and the interpolated $(OBJECTROOT).
local function get_absolute_output_path(env)
local base_dir = env:interpolate('$(OBJECTROOT)$(SEP)')
local cwd = native.getcwd()
return cwd .. "/" .. base_dir
end
-- Derive a stable 24-character identifier from `data`, mimicking the style
-- of Xcode-generated object UIDs (hex, no dashes).
local function newid(data)
  -- The original stored the digest in a local named `string`, shadowing the
  -- standard library table; renamed to avoid that pitfall.
  local guid = native.digest_guid(data)
  return (guid:gsub('-', '')):sub(1, 24)
end
-- Extension -> Xcode PBXFileReference lastKnownFileType map. Hoisted to
-- module scope so it is built once instead of on every call (the sibling
-- Xcode 5 generator already does this).
local xcode_file_types = {
  [".c"] = "sourcecode.c.c",
  [".cc"] = "sourcecode.cpp.cpp",
  [".cpp"] = "sourcecode.cpp.cpp",
  [".css"] = "text.css",
  [".cxx"] = "sourcecode.cpp.cpp",
  [".framework"] = "wrapper.framework",
  [".gif"] = "image.gif",
  [".h"] = "sourcecode.c.h",
  [".html"] = "text.html",
  [".lua"] = "sourcecode.lua",
  [".m"] = "sourcecode.c.objc",
  [".mm"] = "sourcecode.cpp.objc",
  [".nib"] = "wrapper.nib",
  [".pch"] = "sourcecode.c.h",
  [".plist"] = "text.plist.xml",
  [".strings"] = "text.plist.strings",
  [".xib"] = "file.xib",
  [".icns"] = "image.icns",
  -- extensionless outputs are assumed to be executables
  [""] = "compiled.mach-o.executable",
}

-- Return the Xcode file type string for `name`, defaulting to plain "text".
local function getfiletype(name)
  return xcode_file_types[path.get_extension(name)] or "text"
end
-- Build the generator's project record for one unit, or nil for unit kinds
-- this generator does not handle. Buildable units (Program/Shared/Static
-- library) get their flattened string sources keyed by newid(); "OsxBundle"
-- units get their Info.plist plus resources instead.
local function get_project_data(unit, env)
local decl = unit.Decl
if decl.Name and project_types[unit.Keyword] then
local relative_fn = decl.Name
-- Only plain string sources are listed; nested generators are dropped.
local sources = util.flatten(decl.Sources) or {}
sources = util.filter(sources, function (x) return type(x) == "string" end)
if decl.SourceDir then
sources = util.map(sources, function (x) return decl.SourceDir .. x end)
end
local source_list = {}
-- Rebuild source list with ids that is needed by the xcode project layout
for _, fn in ipairs(sources) do
source_list[newid(fn)] = fn
end
return {
Type = unit.Keyword,
Decl = decl,
Sources = source_list,
RelativeFilename = relative_fn,
Guid = newid(decl.Name .. "ProjectId"),
}
elseif unit.Keyword == "OsxBundle" then
-- Bundles have no Name of their own; a fixed one is assigned.
decl.Name = "OsxBundle"
local source_list = {}
source_list[newid(decl.InfoPList)] = decl.InfoPList
for _, resource in ipairs(decl.Resources) do
if resource.Decl then
source_list[newid(resource.Decl.Source)] = resource.Decl.Source
end
end
return {
Type = unit.Keyword,
Decl = decl,
Sources = source_list,
RelativeFilename = "$(OBJECTDIR)/MyApp.app",
Guid = newid("OsxBundle"),
}
else
return nil
end
end
-- Flatten an { id -> filename } map into an array of { Key, Value } records,
-- ordered alphabetically by filename so generated output is deterministic.
local function sort_filelist(source_list)
  local entries = {}
  for id, filename in pairs(source_list) do
    entries[#entries + 1] = { Key = id, Value = filename }
  end
  table.sort(entries, function(lhs, rhs) return lhs.Value < rhs.Value end)
  return entries
end
-- Emit the PBXFileReference section: one reference per unique source file
-- across all projects, plus one per Program output (referenced relative to
-- BUILT_PRODUCTS_DIR instead of the source tree).
-- NOTE(review): the section comment says "FBXFileReference"; the pbxproj
-- convention is "PBXFileReference" -- Xcode ignores these comments, but the
-- typo is carried consistently in Begin/End. Confirm before changing output.
local function write_file_refs(p, projects)
p:write('/* Begin FBXFileReference section */\n')
local cwd = native.getcwd();
-- build the source list
local full_source_list = {}
for _, project in pairs(projects) do
local sources = project.Sources
for key, fn in pairs(sources) do
full_source_list[key] = fn
end
-- include executable names in the source list as well
if project.Type == "Program" then
full_source_list[newid(project.Decl.Name .. "Program")] = project.Decl.Name
end
end
local source_list = {}
-- As we can't sort hashtables we need to move this over to a regular table
source_list = sort_filelist(full_source_list)
for _, entry in pairs(source_list) do
local key = entry.Key
local fn = entry.Value
local name = path.get_filename(fn)
local file_type = getfiletype(fn)
local str = ""
-- Executables live in the products dir; everything else gets an absolute
-- path into the source tree.
if file_type == "compiled.mach-o.executable" then
str = string.format('\t\t%s /* %s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = %s; name = "%s"; includeInIndex = 0; path = "%s"; sourceTree = BUILT_PRODUCTS_DIR; };',
key, fn, file_type, name, fn)
else
str = string.format('\t\t%s /* %s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = %s; name = "%s"; path = "%s"; sourceTree = "<group>"; };',
key, fn, file_type, name, path.join(cwd, fn))
end
p:write(str, '\n')
end
p:write('/* End FBXFileReference section */\n\n')
end
-- Emit the PBXLegacyTarget section: the meta targets (!BuildWorkspace,
-- !UpdateWorkspace) run an external build tool script rather than a native
-- Xcode build phase.
local function write_legacy_targets(p, projects, env)
p:write('/* Begin PBXLegacyTarget section */\n')
local script_path = get_absolute_output_path(env)
for _, project in pairs(projects) do
local decl = project.Decl
if project.IsMeta then
-- Reference shape of the emitted object:
--[[
isa = PBXLegacyTarget;
buildArgumentsString = "";
buildConfigurationList = D7D12762170E4CF98A79B5EF /* Build configuration list for PBXLegacyTarget "!UpdateWorkspace" */;
buildPhases = (
);
buildToolPath = /Users/danielcollin/unity_ps3/ps3/Projects/JamGenerated/_workspace.xcode_/updateworkspace;
dependencies = (
);
name = "!UpdateWorkspace";
passBuildSettingsInEnvironment = 1;
productName = "!UpdateWorkspace";
--]]
p:write('\t\t', newid(decl.Name .. "Target"), ' /* ', decl.Name, ' */ = {\n')
p:write('\t\t\tisa = PBXLegacyTarget;\n')
p:write('\t\t\tbuildArgumentsString = "', project.MetaData.BuildArgs, '";\n')
p:write('\t\t\tbuildConfigurationList = ', newid(decl.Name .. 'Config'), ' /* Build configuration list for PBXLegacyTarget "',decl.Name, '" */;\n')
p:write('\t\t\tbuildPhases = (\n')
p:write('\t\t\t);\n');
-- The build tool is one of the shell scripts written by generate_shellscript().
p:write('\t\t\tbuildToolPath = ', script_path .. project.MetaData.BuildTool, ';\n')
p:write('\t\t\tdependencies = (\n\t\t\t);\n')
p:write('\t\t\tname = "', decl.Name, '";\n')
p:write('\t\t\tpassBuildSettingsInEnvironment = 1;\n')
p:write('\t\t\tproductName = "', decl.Name or "", '";\n')
p:write('\t\t};\n')
end
end
p:write('/* End PBXLegacyTarget section */\n')
end
-- Emit the PBXNativeTarget section: one native target per real (non-meta)
-- project, each building through that project's ShellScript phase.
-- (Function name keeps the original's "targes" spelling; the call site in
-- generate_files depends on it.)
local function write_native_targes(p, projects)
  p:write('/* Begin PBXNativeTarget section */\n')
  local categories = {
    ["Program"] = "com.apple.product-type.tool",
    ["StaticLibrary"] = "com.apple.product-type.library.static",
    ["SharedLibrary"] = "com.apple.product-type.library.dynamic",
  }
  for _, project in pairs(projects) do
    local decl = project.Decl
    if not project.IsMeta then
      p:write('\t\t', newid(decl.Name .. "Target"), ' /* ', decl.Name, ' */ = {\n')
      p:write('\t\t\tisa = PBXNativeTarget;\n')
      p:write('\t\t\tbuildConfigurationList = ', newid(decl.Name .. 'Config'), ' /* Build configuration list for PBXNativeTarget "',decl.Name, '" */;\n')
      p:write('\t\t\tbuildPhases = (\n')
      p:write('\t\t\t\t', newid(decl.Name .. "ShellScript"), ' /* ShellScript */,\n')
      p:write('\t\t\t);\n');
      p:write('\t\t\tbuildRules = (\n\t\t\t);\n')
      p:write('\t\t\tdependencies = (\n\t\t\t);\n')
      p:write('\t\t\tname = "', decl.Name, '";\n')
      -- was "ProductName"; the pbxproj key is lower-camel "productName"
      p:write('\t\t\tproductName = "', decl.Name, '";\n')
      -- also dropped a stray trailing space the original emitted after this line
      p:write('\t\t\tproductReference = ', newid(decl.Name .. "Program"), ' /* ', decl.Name, ' */;\n')
      p:write('\t\t\tproductType = "', categories[project.Type] or "", '";\n')
      p:write('\t\t};\n')
    end
  end
  p:write('/* End PBXNativeTarget section */\n')
end
-- Emit the fixed pbxproj preamble and open the top-level "objects" table.
local function write_header(p)
  p:write(
    '// !$*UTF8*$!\n',
    '{\n',
    '\tarchiveVersion = 1;\n',
    '\tclasses = {\n',
    '\t};\n',
    '\tobjectVersion = 45;\n',
    '\tobjects = {\n',
    '\n')
end
-- Build the full project list: two synthetic "meta" targets that drive
-- tundra itself (!BuildWorkspace / !UpdateWorkspace), followed by one entry
-- per buildable unit from get_project_data().
local function get_projects(raw_nodes, env)
local projects = {}
local source_list = {}
source_list[newid("tundra.lua")] = "tundra.lua"
-- Only list units.lua if it actually exists in the working directory.
local units = io.open("units.lua")
if units then
source_list[newid("units.lua")] = "units.lua"
io.close(units)
end
local meta_name = "!BuildWorkspace"
projects[#projects + 1] = {
Decl = { Name = meta_name, },
Type = "LegacyTarget",
RelativeFilename = "",
Sources = source_list,
Guid = newid(meta_name .. 'ProjectId'),
IsMeta = true,
-- The meta target forwards the Xcode-selected config/variant to tundra.
MetaData = { BuildArgs = "'' $(CONFIG) $(VARIANT) $(SUBVARIANT) $(ACTION)",
BuildTool = "xcodetundra" },
}
local meta_name = "!UpdateWorkspace"
projects[#projects + 1] = {
Decl = { Name = "!UpdateWorkspace", },
Type = "LegacyTarget",
RelativeFilename = "",
Sources = source_list,
Guid = newid(meta_name .. 'ProjectId'),
IsMeta = true,
MetaData = { BuildArgs = "",
BuildTool = "xcodeupdateproj" },
}
-- Append one project per unit this generator understands.
for _, unit in ipairs(raw_nodes) do
local data = get_project_data(unit, env)
if data then projects[#projects + 1] = data; end
end
return projects
end
-- Split "dir/file" (forward or back slashes) into directory and file parts.
-- A path without a separator yields directory "." and the input unchanged.
local function split(fn)
  local dir, file = fn:match("^(.*)[/\\]([^\\/]*)$")
  if dir then
    return dir, file
  end
  return ".", fn
end
-- Split `str` on pattern `pat`, returning the pieces with `name` prepended
-- as the first element. Empty leading pieces are skipped.
local function split_str(str, pat, name)
  local pieces = { name }
  local capture_pat = "(.-)" .. pat
  local pos = 1
  local s, e, cap = str:find(capture_pat, 1)
  while s do
    if s ~= 1 or cap ~= "" then
      pieces[#pieces + 1] = cap
    end
    pos = e + 1
    s, e, cap = str:find(capture_pat, pos)
  end
  -- Trailing remainder after the last separator.
  if pos <= #str then
    pieces[#pieces + 1] = str:sub(pos)
  end
  return pieces
end
-- Concatenate entry[offset..end_count] (skipping nil slots) and hash the
-- result into an Xcode-style id.
local function build_name_id(entry, offset, end_count)
  local parts = {}
  for i = offset, end_count do
    local segment = entry[i]
    if segment ~= nil then
      parts[#parts + 1] = segment
    end
  end
  return newid(table.concat(parts))
end
-- Return an indent string of (level + 1) tabs; level 0 yields a single tab.
-- (Replaces a quadratic concatenation loop with string.rep.)
local function make_indent(level)
  return string.rep('\t', level + 1)
end
-- Render a group's full path from the root ("a/b/c : KEY") by walking the
-- Parent chain upward.
local function make_full_path(grp)
  local segments = { grp.Name }
  local node = grp.Parent
  while node ~= nil do
    table.insert(segments, 1, node.Name)
    node = node.Parent
  end
  return table.concat(segments, '/') .. ' : ' .. grp.Key
end
-- Emit one PBXGroup object for group `g`, listing its child groups first and
-- then its files, each sublist sorted by name for stable output.
local function write_group_ref(p, g, full_path)
p:write('\t\t', g.Key, ' /* ', full_path .. '/' .. g.Name, ' */ = {\n')
p:write('\t\t\tisa = PBXGroup;\n')
p:write('\t\t\tchildren = (\n')
local dirs = {}
local files = {}
-- Partition children into directories and files so each can be sorted.
for _, ref in pairs(g.Children) do
if ref.IsDir then
local key = ref.Key
dirs[#dirs + 1] = { Key = key, Name = ref.Name }
else
local key = ref.Key
files[#files + 1] = { Key = key, Name = ref.Name }
end
end
table.sort(dirs, function(a, b) return a.Name < b.Name end)
table.sort(files, function(a, b) return a.Name < b.Name end)
for _, ref in pairs(dirs) do
p:write(string.format('\t\t\t\t%s /* %s */,\n', ref.Key, full_path .. '/' .. ref.Name))
end
for _, ref in pairs(files) do
p:write(string.format('\t\t\t\t%s /* %s */,\n', ref.Key, full_path .. '/' .. ref.Name))
end
p:write('\t\t\t);\n')
p:write('\t\t\tname = "', g.Name, '"; \n');
p:write('\t\t\tsourceTree = "<group>";\n');
p:write('\t\t};\n')
end
-- Depth-first emit of group references: recurse into each child's subtree,
-- then write the child's own PBXGroup if it has any children.
-- Returns `path` unchanged (kept for call-site compatibility).
-- Cleaned up: the original declared an unused `local c`, computed an unused
-- make_indent() result, and captured the recursion's return value without
-- ever reading it.
local function print_children_2(p, children, path, level)
  if children == nil then
    return path
  end
  for _, child in pairs(children) do
    print_children_2(p, child.Children, path .. '/' .. child.Name, level + 1)
    if #child.Children ~= 0 then
      write_group_ref(p, child, path)
    end
  end
  return path
end
-- Depth-first search for a group named `group` whose Parent is exactly
-- `parent` (compared by reference). Returns the group table or nil.
local function find_group(groups, group, parent)
  if groups == nil then
    return nil
  end
  for _, candidate in pairs(groups) do
    if candidate.Name == group and candidate.Parent == parent then
      return candidate
    end
    local nested = find_group(candidate.Children, group, parent)
    if nested ~= nil then
      return nested
    end
  end
  return nil
end
-- Build a directory-group tree from a project's flat file list and emit the
-- PBXGroup objects for it. `name` is the root group's display name and
-- `parent` its pre-computed id.
local function write_sources(p, children, name, parent)
local filelist = sort_filelist(children)
local groups = {};
table.insert(groups, {Name = name, Parent = nil, Key = parent, Children = {} })
for _, entry in pairs(filelist) do
local parent_group = nil
local path, filename = split(entry.Value)
local split_path = split_str(path, "/", name)
-- Walk/create one group per path component ('.' components are skipped).
for i=1 , #split_path, 1 do
if split_path[i] ~= '.' then
local grp = find_group(groups, split_path[i], parent_group)
if grp == nil then
grp = { IsDir = true, Name=split_path[i], Parent=parent_group, Key=newid(util.tostring(parent_group)..split_path[i]), Children={} }
if parent_group == nil then
table.insert(groups, grp)
else
-- NOTE(review): grp.Parent was just set to parent_group, so this
-- reassignment looks like a no-op; confirm before simplifying.
parent_group = grp.Parent
table.insert(parent_group.Children, grp)
end
end
parent_group = grp
end
end
-- Attach the file itself to the innermost group it belongs to.
if parent_group ~= nil then
table.insert(parent_group.Children, { IsDir = false, Name=filename, Parent=parent_group, Key = entry.Key, Children = {}} )
end
end
print_children_2(p, groups, '.', 0);
end
-- Emit the PBXGroup section: each project's source tree, followed by the
-- root "AllTargets.workspace" group linking all project groups together.
local function write_groups(p, projects)
p:write('/* Begin PBXGroup section */\n')
local all_targets_name = "AllTargets.workspace"
local all_targets_id = newid(all_targets_name)
for _, project in pairs(projects) do
write_sources(p, project.Sources, project.Decl.Name, project.Guid)
end
-- write last group that links the projects names above
p:write('\t\t', all_targets_id, ' /* ', all_targets_name, ' */ = {\n')
p:write('\t\t\tisa = PBXGroup;\n')
p:write('\t\t\tchildren = (\n')
for _, project in pairs(projects) do
p:write(string.format('\t\t\t\t%s /* %s */,\n', project.Guid, project.Decl.Name))
end
p:write('\t\t\t);\n')
p:write('\t\t\tname = "', all_targets_name, '"; \n');
p:write('\t\t\tsourceTree = "<group>";\n');
p:write('\t\t};\n')
p:write('/* End PBXGroup section */\n\n')
end
-- Emit the single PBXProject object: root group, configuration list and the
-- list of all targets (meta and native).
local function write_project(p, projects)
local all_targets_name = "AllTargets.workspace"
local all_targets_id = newid(all_targets_name)
local project_id = newid("ProjectObject")
local project_config_list_id = newid("ProjectObjectConfigList")
p:write('/* Begin PBXProject section */\n')
p:write('\t\t', project_id, ' /* Project object */ = {\n')
p:write('\t\t\tisa = PBXProject;\n')
p:write('\t\t\tbuildConfigurationList = ', project_config_list_id, ' /* Build configuration list for PBXProject "', "Project Object", '" */;\n')
p:write('\t\t\tcompatibilityVersion = "Xcode 3.1";\n')
p:write('\t\t\thasScannedForEncodings = 1;\n')
p:write('\t\t\tmainGroup = ', all_targets_id, ' /* ', all_targets_name, ' */;\n')
p:write('\t\t\tprojectDirPath = "";\n')
p:write('\t\t\tprojectRoot = "";\n')
p:write('\t\t\ttargets = (\n')
for _, project in pairs(projects) do
p:write(string.format('\t\t\t\t%s /* %s */,\n', newid(project.Decl.Name .. "Target"), project.Decl.Name))
end
p:write('\t\t\t);\n')
p:write('\t\t};\n')
p:write('/* End PBXProject section */\n')
end
-- Emit the PBXShellScriptBuildPhase section: each non-meta target gets a
-- shell-script phase invoking the generated "xcodetundra" wrapper with the
-- Xcode-provided target/config environment.
local function write_shellscripts(p, projects, env)
  p:write('/* Begin PBXShellScriptBuildPhase section */\n')
  -- TODO: Do we really need to repeat this for all projects? Seems a bit wasteful.
  local xcodetundra_filename = get_absolute_output_path(env) .. "xcodetundra"
  for _, project in pairs(projects) do
    local name = project.Decl.Name
    if not project.IsMeta then
      p:write('\t\t', newid(name .. "ShellScript"), ' /* ShellScript */ = {\n')
      p:write('\t\t\tisa = PBXShellScriptBuildPhase;\n')
      p:write('\t\t\tbuildActionMask = 2147483647;\n')
      p:write('\t\t\tfiles = (\n')
      p:write('\t\t\t);\n')
      p:write('\t\t\tinputPaths = (\n')
      p:write('\t\t\t);\n')
      p:write('\t\t\toutputPaths = (\n')
      p:write('\t\t\t);\n')
      p:write('\t\t\trunOnlyForDeploymentPostprocessing = 0;\n')
      p:write('\t\t\tshellPath = /bin/sh;\n')
      p:write('\t\t\tshellScript = "', xcodetundra_filename, ' $TARGET_NAME $CONFIG $VARIANT $SUBVARIANT $ACTION -v";\n')
      p:write('\t\t};\n')
    end
  end
  -- was another "Begin" line, leaving the section comment unbalanced
  p:write('/* End PBXShellScriptBuildPhase section */\n')
end
-- Compose the canonical "<config>-<variant>-<subvariant>" name for a tuple.
local function get_full_config_name(config)
  return string.format("%s-%s-%s",
    config.Config.Name, config.Variant.Name, config.SubVariant)
end
-- Emit one XCBuildConfiguration per (project, config tuple) pair, passing
-- the tundra CONFIG/VARIANT/SUBVARIANT through as build settings.
local function write_configs(p, projects, config_tuples, env)
p:write('/* Begin XCConfigurationList section */\n')
-- I wonder if we really need to do it this way for all configs?
for __, project in ipairs(projects) do
for _, tuple in ipairs(config_tuples) do
local full_config_name = get_full_config_name(tuple)
-- A tuple counts as macosx-native if macosx is a supported or default host.
local is_macosx_native = false
for _, host in util.nil_ipairs(tuple.Config.SupportedHosts) do
if host == "macosx" then
is_macosx_native = true
end
end
if "macosx" == tuple.Config.DefaultOnHost then
is_macosx_native = true
end
local config_id = newid(project.Decl.Name .. full_config_name)
p:write('\t\t', config_id, ' = {\n')
p:write('\t\t\tisa = XCBuildConfiguration;\n')
p:write('\t\t\tbuildSettings = {\n')
if is_macosx_native then
p:write('\t\t\t\tARCHS = "$(NATIVE_ARCH_ACTUAL)";\n')
end
p:write('\t\t\t\tVARIANT = "', tuple.Variant.Name, '";\n')
p:write('\t\t\t\tCONFIG = "', tuple.Config.Name, '";\n')
p:write('\t\t\t\tSUBVARIANT = "', tuple.SubVariant, '";\n')
if is_macosx_native and not project.IsMeta then
p:write('\t\t\t\tCONFIGURATION_BUILD_DIR = "', full_config_name, '";\n')
end
-- this is a little hack to get xcode to clean the whole output folder when using "FullBuild"
p:write('\t\t\t\tPRODUCT_NAME = "',project.Decl.Name , '";\n')
p:write('\t\t\t\tTARGET_NAME = "',project.Decl.Name , '";\n')
p:write('\t\t\t};\n')
p:write('\t\t\tname = "',full_config_name , '";\n')
p:write('\t\t};\n')
end
end
p:write('/* End XCConfigurationList section */\n')
end
-- Emit the XCConfigurationList section: one build-config list per project,
-- referencing the XCBuildConfiguration ids written by write_configs(), and
-- choosing a sensible default configuration.
local function write_config_list(p, projects, config_tuples)
  p:write('/* Begin XCConfigurationList section */\n')
  local default_config = ""
  -- Prefer a "macosx-*" debug tuple as the default configuration.
  for _, tuple in ipairs(config_tuples) do
    local is_macosx_native = tuple.Config.Name:match('^macosx%-')
    if is_macosx_native and tuple.Variant.Name == "debug" then
      default_config = get_full_config_name(tuple)
      break
    end
  end
  -- If no macosx debug config exists, fall back to the first tuple.
  -- (was config_tuples[0]; Lua arrays are 1-based, so that always indexed
  -- nil and crashed inside get_full_config_name)
  if default_config == "" then
    default_config = get_full_config_name(config_tuples[1])
  end
  for __, project in ipairs(projects) do
    local config_id = newid(project.Decl.Name .. 'Config')
    p:write('\t\t', config_id, ' /* Build config list for "', project.Decl.Name, '" */ = {\n')
    p:write('\t\t\tisa = XCConfigurationList;\n')
    p:write('\t\t\tbuildConfigurations = (\n')
    for _, tuple in ipairs(config_tuples) do
      local full_config_name = get_full_config_name(tuple)
      p:write(string.format('\t\t\t\t%s /* %s */,\n', newid(project.Decl.Name .. full_config_name), full_config_name))
    end
    p:write('\t\t\t);\n')
    p:write('\t\t\tdefaultConfigurationIsVisible = 1;\n')
    p:write('\t\t\tdefaultConfigurationName = "', default_config, '";\n')
    p:write('\t\t};\n')
  end
  p:write('/* End XCConfigurationList section */\n')
end
-- Close the top-level objects table and point Xcode at the root object.
local function write_footer(p)
  p:write('\t};\n')
  p:write('\trootObject = ', newid("ProjectObject"),
    ' /* Project object */;\n', '}\n')
end
-- Write the two helper shell scripts Xcode invokes: "xcodetundra" (build or
-- clean a single target; maps Xcode's action to tundra's -c flag) and
-- "xcodeupdateproj" (regenerate the project files). Both are chmod +x'ed.
local function generate_shellscript(env)
  local filename = path.join(get_absolute_output_path(env), "xcodetundra")
  -- assert() so a failed open is reported instead of erroring on nil later
  local p = assert(io.open(filename, 'wb'))
  -- was "#/bin/sh" (missing '!'), which is not a valid shebang line
  p:write("#!/bin/sh\n")
  p:write("TARGET_NAME=$1\n")
  p:write("CONFIG=$2\n")
  p:write("VARIANT=$3\n")
  p:write("SUBVARIANT=$4\n")
  p:write("ACTION=$5\n")
  p:write('if [ "$5" = "clean" ]; then\n')
  p:write(' ACTION="-c"\n')
  p:write("fi\n\n")
  p:write('if [ "$5" = "build" ]; then\n')
  p:write(' ACTION=""\n')
  p:write("fi\n\n")
  p:write(TundraExePath .. " --full-paths $TARGET_NAME $CONFIG-$VARIANT-$SUBVARIANT $ACTION -v\n")
  p:close()
  os.execute("chmod +x " .. filename)

  local filename = path.join(get_absolute_output_path(env), "xcodeupdateproj")
  local p = assert(io.open(filename, 'wb'))
  p:write("#!/bin/sh\n")
  p:write(TundraExePath .. " --ide-gen xcode3 -a\n")
  p:close()
  os.execute("chmod +x " .. filename)
end
-- Entry point called by nodegen: create the output directories, the helper
-- shell scripts and the full project.pbxproj for all units.
function xcode_generator:generate_files(ngen, config_tuples, raw_nodes, env, default_names)
  assert(config_tuples and #config_tuples > 0)
  -- TODO: Set the first default config as default
  local base_dir = env:interpolate('$(OBJECTROOT)$(SEP)')
  local xcodeproj_dir = base_dir .. "tundra-generated.xcodeproj/"
  native.mkdir(base_dir)
  native.mkdir(xcodeproj_dir)
  generate_shellscript(env)
  -- assert() so a failed open is reported instead of erroring on nil later
  local p = assert(io.open(path.join(xcodeproj_dir, "project.pbxproj"), 'wb'))
  local projects = get_projects(raw_nodes, env)
  write_header(p)
  write_file_refs(p, projects)
  write_groups(p, projects)
  write_legacy_targets(p, projects, env)
  write_native_targes(p, projects)
  write_project(p, projects)
  write_shellscripts(p, projects, env)
  write_configs(p, projects, config_tuples, env)
  write_config_list(p, projects, config_tuples)
  write_footer(p)
  -- The handle was previously never closed; close it so the project file is
  -- flushed to disk before Xcode reads it.
  p:close()
end
-- Register this generator with nodegen; a fresh state table is created per
-- invocation and dispatched through the xcode_generator metatable.
nodegen.set_ide_backend(function(...)
local state = setmetatable({}, xcode_generator)
state:generate_files(...)
end)

View File

@ -0,0 +1,924 @@
-- Xcode 5 Workspace/Project file generation
module(..., package.seeall)
-- Project-local modules: path helpers, IDE node generation, utilities and
-- native (C-side) services (digest_guid, getcwd, mkdir).
local path = require "tundra.path"
local nodegen = require "tundra.nodegen"
local util = require "tundra.util"
local native = require "tundra.native"
-- Generator "class" table; methods are attached below via __index dispatch.
local xcode_generator = {}
xcode_generator.__index = xcode_generator
-- Write an Xcode workspace XML file (`fn`) with one FileRef per generated
-- .xcodeproj. `projects` entries must carry Decl.Name.
function xcode_generator:generate_workspace(fn, projects)
  local sln = assert(io.open(fn, 'wb'))
  sln:write('<?xml version="1.0" encoding="UTF-8"?>\n')
  sln:write('<Workspace\n')
  sln:write('\tversion = "1.0">\n')
  for _, proj in ipairs(projects) do
    -- NOTE(review): the original computed a `fname` from
    -- proj.RelativeFilename but never used it; dead code removed.
    local name = proj.Decl.Name
    sln:write('\t<FileRef\n')
    sln:write('\t\tlocation = "group:', name .. '.xcodeproj">\n')
    sln:write('\t</FileRef>\n')
  end
  sln:write('</Workspace>\n')
  -- The handle was previously leaked; close it so the file is flushed.
  sln:close()
end
-- Unit keywords that map to a real Xcode target in the generated project.
local project_types = util.make_lookup_table {
"Program", "SharedLibrary", "StaticLibrary",
}
-- NOTE(review): Windows-style extensions; appears unused in the visible
-- portion of this generator -- confirm before removing.
local toplevel_stuff = util.make_lookup_table {
".exe", ".lib", ".dll",
}
-- Extensions (and the empty extension) treated as binary build artifacts,
-- skipped when collecting sources in get_sources().
local binary_extension = util.make_lookup_table {
"", ".obj", ".o", ".a",
}
-- Extensions considered header files by get_headers().
local header_exts = util.make_lookup_table {
".h", ".hpp", ".hh", ".inl",
}
-- Derive a stable 24-character identifier from `data`, mimicking the style
-- of Xcode-generated object UIDs (hex, no dashes).
local function newid(data)
  -- The original stored the digest in a local named `string`, shadowing the
  -- standard library table; renamed to avoid that pitfall.
  local guid = native.digest_guid(data)
  return (guid:gsub('-', '')):sub(1, 24)
end
-- Extension -> Xcode PBXFileReference lastKnownFileType map.
local file_types = {
[".c"] = "sourcecode.c.c",
[".cc"] = "sourcecode.cpp.cpp",
[".cpp"] = "sourcecode.cpp.cpp",
[".css"] = "text.css",
[".cxx"] = "sourcecode.cpp.cpp",
[".framework"] = "wrapper.framework",
[".gif"] = "image.gif",
[".h"] = "sourcecode.c.h",
[".html"] = "text.html",
[".lua"] = "sourcecode.lua",
[".m"] = "sourcecode.c.objc",
[".mm"] = "sourcecode.cpp.objc",
[".nib"] = "wrapper.nib",
[".pch"] = "sourcecode.c.h",
[".plist"] = "text.plist.xml",
[".strings"] = "text.plist.strings",
[".xib"] = "file.xib",
[".icns"] = "image.icns",
-- extensionless outputs are assumed to be executables
[""] = "compiled.mach-o.executable",
}
-- Return the Xcode file type string for `name`, defaulting to plain "text".
local function getfiletype(name)
return file_types[path.get_extension(name)] or "text"
end
-- Scan for sources, following dependencies until those dependencies seem to be a different top-level unit
-- Marks non-binary outputs/inputs of `dag` in `sources` (and outputs also in
-- `generated`), then recurses into deps that are not themselves top-level
-- DAG nodes (per `dag_lut`).
local function get_sources(dag, sources, generated, dag_lut)
for _, output in ipairs(dag.outputs) do
local ext = path.get_extension(output)
if not binary_extension[ext] then
generated[output] = true
sources[output] = true -- pick up generated headers
end
end
for _, input in ipairs(dag.inputs) do
local ext = path.get_extension(input)
if not binary_extension[ext] then
sources[input] = true
end
end
for _, dep in util.nil_ipairs(dag.deps) do
if not dag_lut[dep] then -- don't go into other top-level DAGs
get_sources(dep, sources, generated, dag_lut)
end
end
end
-- Collect header files from `unit` (and, recursively, from its non-top-level
-- dependencies such as ObjGroups) into the `sources` set. Headers are not in
-- the DAG, so they must be gathered from the declared source lists.
local function get_headers(unit, sources, dag_lut, name_to_dags)
  local src_dir = ''
  if not unit.Decl then
    -- Ignore ExternalLibrary and similar that have no data.
    return
  end
  if unit.Decl.SourceDir then
    src_dir = unit.Decl.SourceDir .. '/'
  end
  for _, src in util.nil_ipairs(nodegen.flatten_list('*-*-*-*', unit.Decl.Sources)) do
    if type(src) == "string" then
      local ext = path.get_extension(src)
      if header_exts[ext] then
        local full_path = path.normalize(src_dir .. src)
        sources[full_path] = true
      end
    end
  end
  -- A dependency counts as top-level if it names a known unit or owns one of
  -- the top-level DAG nodes; those get their own project and are skipped.
  local function toplevel(u)
    if type(u) == "string" then
      return type(name_to_dags[u]) ~= "nil"
    end
    for _, dag in pairs(u.Decl.__DagNodes) do
      if dag_lut[dag] then
        return true
      end
    end
    return false
  end
  -- Repeat for dependencies ObjGroups
  for _, dep in util.nil_ipairs(nodegen.flatten_list('*-*-*-*', unit.Decl.Depends)) do
    if not toplevel(dep) then
      -- The original recursion dropped `name_to_dags`, making nested
      -- toplevel() checks index a nil table for string dependencies.
      get_headers(dep, sources, dag_lut, name_to_dags)
    end
  end
end
-- Flatten an { id -> filename } map into an array of { Key, Value } records,
-- ordered alphabetically by filename so generated output is deterministic.
local function sort_filelist(source_list)
  local entries = {}
  for id, filename in pairs(source_list) do
    entries[#entries + 1] = { Key = id, Value = filename }
  end
  table.sort(entries, function(lhs, rhs) return lhs.Value < rhs.Value end)
  return entries
end
-- Emit the PBXFileReference section: one reference per unique source file
-- across all projects, plus one per Program output (referenced relative to
-- BUILT_PRODUCTS_DIR instead of the source tree).
-- NOTE(review): the section comment says "FBXFileReference"; the pbxproj
-- convention is "PBXFileReference" -- Xcode ignores these comments, but
-- confirm before changing the generated text.
local function write_file_refs(p, projects)
p:write('/* Begin FBXFileReference section */\n')
local cwd = native.getcwd();
-- build the source list
local full_source_list = {}
for _, project in ipairs(projects) do
local sources = project.Sources
for key, fn in pairs(sources) do
full_source_list[key] = fn
end
-- include executable names in the source list as well
if project.Type == "Program" then
full_source_list[newid(project.Decl.Name .. "Program")] = project.Decl.Name
end
end
local source_list = {}
-- As we can't sort hashtables we need to move this over to a regular table
source_list = sort_filelist(full_source_list)
for _, entry in pairs(source_list) do
local key = entry.Key
local fn = entry.Value
local name = path.get_filename(fn)
local file_type = getfiletype(fn)
local str = ""
-- Executables live in the products dir; everything else gets an absolute
-- path into the source tree.
if file_type == "compiled.mach-o.executable" then
str = string.format('\t\t%s /* %s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = %s; name = "%s"; includeInIndex = 0; path = "%s"; sourceTree = BUILT_PRODUCTS_DIR; };',
key, fn, file_type, name, fn)
else
str = string.format('\t\t%s /* %s */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = %s; name = "%s"; path = "%s"; sourceTree = "<group>"; };',
key, fn, file_type, name, path.join(cwd, fn))
end
p:write(str, '\n')
end
p:write('/* End FBXFileReference section */\n\n')
end
-- Emit the PBXLegacyTarget section: the meta targets run an external build
-- tool from the parent directory rather than a native Xcode build phase.
local function write_legacy_targets(p, projects, env)
p:write('/* Begin PBXLegacyTarget section */\n')
for _, project in ipairs(projects) do
local decl = project.Decl
if project.IsMeta then
-- Reference shape of the emitted object:
--[[
isa = PBXLegacyTarget;
buildArgumentsString = "";
buildConfigurationList = D7D12762170E4CF98A79B5EF /* Build configuration list for PBXLegacyTarget "!UpdateWorkspace" */;
buildPhases = (
);
buildToolPath = /Users/danielcollin/unity_ps3/ps3/Projects/JamGenerated/_workspace.xcode_/updateworkspace;
dependencies = (
);
name = "!UpdateWorkspace";
passBuildSettingsInEnvironment = 1;
productName = "!UpdateWorkspace";
--]]
p:write('\t\t', newid(decl.Name .. "Target"), ' /* ', decl.Name, ' */ = {\n')
p:write('\t\t\tisa = PBXLegacyTarget;\n')
p:write('\t\t\tbuildArgumentsString = "', project.MetaData.BuildArgs, '";\n')
p:write('\t\t\tbuildConfigurationList = ', newid(decl.Name .. 'Config'), ' /* Build configuration list for PBXLegacyTarget "',decl.Name, '" */;\n')
p:write('\t\t\tbuildPhases = (\n')
p:write('\t\t\t);\n');
-- Unlike the xcode3 generator, the tool path is relative and runs from "..".
p:write('\t\t\tbuildToolPath = ', project.MetaData.BuildTool, ';\n')
p:write('\t\t\tbuildWorkingDirectory = ', '..', ';\n')
p:write('\t\t\tdependencies = (\n\t\t\t);\n')
p:write('\t\t\tname = "', decl.Name, '";\n')
p:write('\t\t\tpassBuildSettingsInEnvironment = 1;\n')
p:write('\t\t\tproductName = "', decl.Name or "", '";\n')
p:write('\t\t};\n')
end
end
p:write('/* End PBXLegacyTarget section */\n')
end
-- Emit the PBXNativeTarget section: one native target per real (non-meta)
-- project, each building through that project's ShellScript phase.
-- (Function name keeps the original's "targes" spelling; the caller
-- elsewhere in this file depends on it.)
local function write_native_targes(p, projects)
  p:write('/* Begin PBXNativeTarget section */\n')
  local categories = {
    ["Program"] = "com.apple.product-type.tool",
    ["StaticLibrary"] = "com.apple.product-type.library.static",
    ["SharedLibrary"] = "com.apple.product-type.library.dynamic",
  }
  for _, project in ipairs(projects) do
    local decl = project.Decl
    if not project.IsMeta then
      p:write('\t\t', newid(decl.Name .. "Target"), ' /* ', decl.Name, ' */ = {\n')
      p:write('\t\t\tisa = PBXNativeTarget;\n')
      p:write('\t\t\tbuildConfigurationList = ', newid(decl.Name .. 'Config'), ' /* Build configuration list for PBXNativeTarget "',decl.Name, '" */;\n')
      p:write('\t\t\tbuildPhases = (\n')
      p:write('\t\t\t\t', newid(decl.Name .. "ShellScript"), ' /* ShellScript */,\n')
      p:write('\t\t\t);\n');
      p:write('\t\t\tbuildRules = (\n\t\t\t);\n')
      p:write('\t\t\tdependencies = (\n\t\t\t);\n')
      p:write('\t\t\tname = "', decl.Name, '";\n')
      -- was "ProductName"; the pbxproj key is lower-camel "productName"
      p:write('\t\t\tproductName = "', decl.Name, '";\n')
      -- also dropped a stray trailing space the original emitted after this line
      p:write('\t\t\tproductReference = ', newid(decl.Name .. "Program"), ' /* ', decl.Name, ' */;\n')
      p:write('\t\t\tproductType = "', categories[project.Type] or "", '";\n')
      p:write('\t\t};\n')
    end
  end
  p:write('/* End PBXNativeTarget section */\n')
end
-- Emit the fixed pbxproj preamble and open the top-level "objects" table.
local function write_header(p)
  p:write(
    '// !$*UTF8*$!\n',
    '{\n',
    '\tarchiveVersion = 1;\n',
    '\tclasses = {\n',
    '\t};\n',
    '\tobjectVersion = 45;\n',
    '\tobjects = {\n',
    '\n')
end
-- Build the project map for all buildable units: walks each unit's DAG nodes
-- for sources, adds declared headers, sorts units by dependency weight so
-- leaf units claim shared sources first, and handles OsxBundle units.
local function get_projects(raw_nodes, env, hints, ide_script)
local projects = {}
-- Filter out stuff we don't care about.
local units = util.filter(raw_nodes, function (u)
return u.Decl.Name and project_types[u.Keyword]
end)
local dag_node_lut = {} -- lookup table of all named, top-level DAG nodes
local name_to_dags = {} -- table mapping unit name to array of dag nodes (for configs)
-- Map out all top-level DAG nodes
for _, unit in ipairs(units) do
local decl = unit.Decl
local dag_nodes = assert(decl.__DagNodes, "no dag nodes for " .. decl.Name)
for build_id, dag_node in pairs(dag_nodes) do
dag_node_lut[dag_node] = unit
local array = name_to_dags[decl.Name]
if not array then
array = {}
name_to_dags[decl.Name] = array
end
array[#array + 1] = dag_node
end
end
-- Sort units based on dependency complexity. We want to visit the leaf nodes
-- first so that any source file references are picked up as close to the
-- bottom of the dependency chain as possible.
local unit_weights = {}
for _, unit in ipairs(units) do
local decl = unit.Decl
local stack = { }
for _, dag in pairs(decl.__DagNodes) do
stack[#stack + 1] = dag
end
-- Weight = number of reachable top-level DAG nodes (iterative DFS).
local weight = 0
while #stack > 0 do
local node = table.remove(stack)
if dag_node_lut[node] then
weight = weight + 1
end
for _, dep in util.nil_ipairs(node.deps) do
stack[#stack + 1] = dep
end
end
unit_weights[unit] = weight
end
table.sort(units, function (a, b)
return unit_weights[a] < unit_weights[b]
end)
-- Keep track of what source files have already been grabbed by other projects.
local grabbed_sources = {}
for _, unit in ipairs(units) do
local decl = unit.Decl
local name = decl.Name
local sources = {}
local generated = {}
for build_id, dag_node in pairs(decl.__DagNodes) do
get_sources(dag_node, sources, generated, dag_node_lut)
end
-- Explicitly add all header files too as they are not picked up from the DAG
-- Also pick up headers from non-toplevel DAGs we're depending on
get_headers(unit, sources, dag_node_lut, name_to_dags)
-- Figure out which project should get this data.
local output_name = name
local ide_hints = unit.Decl.IdeGenerationHints
if ide_hints then
if ide_hints.OutputProject then
output_name = ide_hints.OutputProject
end
end
-- Rebuild source list with ids that are needed by the xcode project layout
local source_list = {}
for src, _ in pairs(sources) do
local norm_src = path.normalize(src)
-- NOTE(review): the dedup check below is commented out, so files shared
-- between units currently appear in every project that reaches them.
-- if not grabbed_sources[norm_src] then
grabbed_sources[norm_src] = unit
source_list[newid(norm_src)] = norm_src
-- end
end
projects[name] = {
Type = unit.Keyword,
Decl = decl,
Sources = source_list,
RelativeFilename = name,
Guid = newid(name .. "ProjectId"),
IdeGenerationHints = unit.Decl.IdeGenerationHints
}
end
-- OsxBundle units get a synthetic project of their own.
for _, unit in ipairs(raw_nodes) do
if unit.Keyword == "OsxBundle" then
local decl = unit.Decl
decl.Name = "OsxBundle"
local source_list = {[newid(decl.InfoPList)] = decl.InfoPList}
for _, resource in util.nil_ipairs(decl.Resources) do
if resource.Decl then
source_list[newid(resource.Decl.Source)] = resource.Decl.Source
end
end
projects["OsxBundle"] = {
Type = unit.Keyword,
Decl = decl,
Sources = source_list,
RelativeFilename = "$(OBJECTDIR)/MyApp.app",
Guid = newid("OsxBundle"),
}
end
end
return projects
end
-- Split "dir/file" (forward or back slashes) into directory and file parts.
-- A path without a separator yields directory "." and the input unchanged.
local function split(fn)
  local dir, file = fn:match("^(.*)[/\\]([^\\/]*)$")
  if dir then
    return dir, file
  end
  return ".", fn
end
-- Split `str` on pattern `pat` into an array of pieces, skipping empty
-- leading pieces and keeping any trailing remainder.
local function split_str(str, pat)
  local pieces = {}
  local capture_pat = "(.-)" .. pat
  local pos = 1
  local s, e, cap = str:find(capture_pat, 1)
  while s do
    if s ~= 1 or cap ~= "" then
      pieces[#pieces + 1] = cap
    end
    pos = e + 1
    s, e, cap = str:find(capture_pat, pos)
  end
  if pos <= #str then
    pieces[#pieces + 1] = str:sub(pos)
  end
  return pieces
end
-- Recursively emit PBXGroup entries to project file 'p' for a tree of
-- group/file nodes (as produced by make_groups).  Child groups are written
-- before their parent so every referenced id already exists in the file.
-- 'key' is this group's Xcode object id; 'path' is only used inside the
-- /* ... */ comments Xcode shows.
local function print_children_2(p, groupname, key, children, path)
  -- Type > 0 marks a (sub)group; Type == 1 extends the comment path,
  -- other group types reuse the current path.
  for name, c in pairs(children) do
    if c.Type > 0 then
      print_children_2(p, name, c.Key, c.Children, c.Type == 1 and path..'/'..name or path)
    end
  end
  p:write('\t\t', key, ' /* ', path, ' */ = {\n')
  p:write('\t\t\tisa = PBXGroup;\n')
  p:write('\t\t\tchildren = (\n')
  -- Partition into directories and files so the listing can show all
  -- directories first, each section alphabetically sorted.
  local dirs = {}
  local files = {}
  for name, ref in pairs(children) do
    if ref.Type > 0 then
      dirs[#dirs + 1] = { Key = ref.Key, Name = name }
    else
      files[#files + 1] = { Key = ref.Key, Name = name }
    end
  end
  table.sort(dirs, function(a, b) return a.Name < b.Name end)
  table.sort(files, function(a, b) return a.Name < b.Name end)
  for i, ref in pairs(dirs) do
    p:write(string.format('\t\t\t\t%s /* %s */,\n', ref.Key, path .. '/' .. ref.Name))
  end
  for i, ref in pairs(files) do
    p:write(string.format('\t\t\t\t%s /* %s */,\n', ref.Key, path .. '/' .. ref.Name))
  end
  p:write('\t\t\t);\n')
  p:write('\t\t\tname = "', groupname, '"; \n');
  p:write('\t\t\tsourceTree = "<group>";\n');
  p:write('\t\t};\n')
end
-- Collapse chains of single-child groups into one level.
-- Returns the new display name for 'group' when it was collapsed into its
-- only child group, or nil when the group keeps its own name.
local function prune_groups(group)
  -- Count the children, remembering the last (name, child) pair so a
  -- single-child group can be collapsed without a second pass.
  local count = 0
  local first_name
  local first_child
  for name, child in pairs(group.Children) do
    first_name = name
    first_child = child
    count = count + 1
  end
  if count == 1 and first_child.Type > 0 then
    -- Exactly one child and it is a group: splice its (already pruned)
    -- children up into this level and report the collapsed name.
    local new_name = prune_groups(first_child)
    group.Children = first_child.Children;
    if not new_name then
      new_name = first_name
    end
    return new_name
  else
    -- Several children (or a single file): prune each subgroup and apply
    -- any rename the collapsing produced.
    local children = {}
    for name, child in pairs(group.Children) do
      if child.Type > 0 then
        local new_name = prune_groups(child)
        if new_name then
          name = new_name
        end
      end
      children[name] = child
    end
    -- BUG FIX: this used to assign to 'group.children' (lower-case c), so
    -- the rebuilt table with renamed keys was silently discarded and
    -- collapse-renames never took effect.  Every other site uses 'Children'.
    group.Children = children
    return nil
  end
end
-- Build the nested group tree for one project's source files.  Directory
-- components become Type-1 group nodes and files Type-0 leaves; chains of
-- single-entry groups are collapsed afterwards via prune_groups.
-- ('p' is unused but kept for call-site compatibility.)
local function make_groups(p, files, key)
  local root = { Type = 2, Key = key, Children = {} }
  for _, entry in pairs(sort_filelist(files)) do
    local dir_part, file_part = split(entry.Value)
    local node = root
    for _, component in ipairs(split_str(dir_part, "/")) do
      if component ~= '.' then
        local sub = node.Children[component]
        if sub == nil then
          -- Group ids are derived from the parent's identity plus the
          -- component name so they stay unique across the tree.
          sub = { Type = 1, Key = newid(util.tostring(node) .. component), Children = {} }
          node.Children[component] = sub
        end
        node = sub
      end
    end
    node.Children[file_part] = { Type = 0, Key = entry.Key }
  end
  -- prune single-entry groups
  prune_groups(root)
  return root
end
-- Write the PBXGroup section: one group tree per project, arranged under
-- optional solution folders, rooted in an "AllTargets.workspace" group.
local function write_groups(p, projects)
  p:write('/* Begin PBXGroup section */\n')
  -- Map folder names to array of projects under that folder
  local folders = {}
  for _, project in ipairs(projects) do
    local hints = project.IdeGenerationHints
    -- NOTE(review): this reads the Msvc.SolutionFolder hint inside the
    -- Xcode generator -- possibly deliberate reuse, but verify it should
    -- not be an Xcode-specific hint instead.
    local msvc_hints = hints and hints.Msvc
    local fname = msvc_hints and msvc_hints.SolutionFolder
    if fname == nil then
      fname = "<root>"
    end
    local folder = folders[fname]
    if folder == nil then
      folder = { Type = 2, Key = newid("Folder"..fname), Children = {} }
      folders[fname] = folder
    end
    folder.Children[project.Decl.Name] = make_groups(p, project.Sources, project.Guid)
  end
  -- Hang every named folder off the root folder.
  -- NOTE(review): if all projects carry a SolutionFolder hint, no "<root>"
  -- entry exists and 'root' is nil here -- the loop below would then error.
  local root = folders["<root>"];
  for name, folder in pairs(folders) do
    if folder ~= root then
      root.Children[name] = folder
    end
  end
  local all_targets_name = "AllTargets.workspace"
  local all_targets_id = newid(all_targets_name)
  print_children_2(p, all_targets_name, all_targets_id, root.Children, '.');
  -- write last group that links the projects names above
  -- local all_targets_name = "AllTargets.workspace"
  -- local all_targets_id = newid(all_targets_name)
  -- p:write('\t\t', all_targets_id, ' /* ', all_targets_name, ' */ = {\n')
  -- p:write('\t\t\tisa = PBXGroup;\n')
  -- p:write('\t\t\tchildren = (\n')
  -- for _, project in pairs(projects) do
  -- p:write(string.format('\t\t\t\t%s /* %s */,\n', project.Guid, project.Decl.Name))
  -- end
  -- p:write('\t\t\t);\n')
  -- p:write('\t\t\tname = "', all_targets_name, '"; \n');
  -- p:write('\t\t\tsourceTree = "<group>";\n');
  -- p:write('\t\t};\n')
  p:write('/* End PBXGroup section */\n\n')
end
-- Write the PBXProject section: the single project object, pointing at the
-- root group, the project-level configuration list, and one target id per
-- project (the targets themselves are written elsewhere).
local function write_project(p, projects)
  local all_targets_name = "AllTargets.workspace"
  local all_targets_id = newid(all_targets_name)
  local project_id = newid("ProjectObject")
  local project_config_list_id = newid("ProjectObjectConfigList")
  p:write('/* Begin PBXProject section */\n')
  p:write('\t\t', project_id, ' /* Project object */ = {\n')
  p:write('\t\t\tisa = PBXProject;\n')
  p:write('\t\t\tbuildConfigurationList = ', project_config_list_id, ' /* Build configuration list for PBXProject "', "Project Object", '" */;\n')
  p:write('\t\t\tcompatibilityVersion = "Xcode 3.1";\n')
  p:write('\t\t\thasScannedForEncodings = 1;\n')
  p:write('\t\t\tmainGroup = ', all_targets_id, ' /* ', all_targets_name, ' */;\n')
  p:write('\t\t\tprojectDirPath = "";\n')
  p:write('\t\t\tprojectRoot = "";\n')
  p:write('\t\t\ttargets = (\n')
  -- Target ids must match the ids used when the native/legacy targets are
  -- emitted: newid(name .. "Target").
  for _, project in ipairs(projects) do
    p:write(string.format('\t\t\t\t%s /* %s */,\n', newid(project.Decl.Name .. "Target"), project.Decl.Name))
  end
  p:write('\t\t\t);\n')
  p:write('\t\t};\n')
  p:write('/* End PBXProject section */\n')
end
-- Write a PBXShellScriptBuildPhase per non-meta project that shells out to
-- tundra itself to do the actual build for the selected config/variant.
local function write_shellscripts(p, projects, env)
  p:write('/* Begin PBXShellScriptBuildPhase section */\n')
  -- TODO: Do we really need to repeat this for all projects? seems a bit wasteful
  for _, project in ipairs(projects) do
    local name = project.Decl.Name
    if not project.IsMeta then
      p:write('\t\t', newid(name .. "ShellScript"), ' /* ShellScript */ = {\n')
      p:write('\t\t\tisa = PBXShellScriptBuildPhase;\n')
      p:write('\t\t\tbuildActionMask = 2147483647;\n')
      p:write('\t\t\tfiles = (\n')
      p:write('\t\t\t);\n')
      p:write('\t\t\tinputPaths = (\n')
      p:write('\t\t\t);\n')
      p:write('\t\t\toutputPaths = (\n')
      p:write('\t\t\t);\n')
      p:write('\t\t\trunOnlyForDeploymentPostprocessing = 0;\n')
      p:write('\t\t\tshellPath = /bin/sh;\n')
      -- CONFIG/VARIANT/SUBVARIANT are build settings defined per
      -- configuration in write_configs below.
      p:write('\t\t\tshellScript = "cd ..\\n', TundraExePath, ' $(CONFIG)-$(VARIANT)-$(SUBVARIANT)";\n')
      p:write('\t\t};\n')
    end
  end
  p:write('/* End PBXShellScriptBuildPhase section */\n')
end
-- Compose the canonical "<config>-<variant>-<subvariant>" identifier for a
-- config tuple.
local function get_full_config_name(config)
  return string.format("%s-%s-%s", config.Config.Name, config.Variant.Name, config.SubVariant)
end
-- Write the XCBuildConfiguration section: one configuration object per
-- (project, config-tuple) pair plus one per tuple for the PBXProject
-- itself.  'set_env' lists environment variable names to bake into each
-- configuration's build settings.
local function write_configs(p, projects, config_tuples, env, set_env)
  p:write('/* Begin XCBuildConfiguration section */\n')
  -- I wonder if we really need to do it this way for all configs?
  for _, project in ipairs(projects) do
    for _, tuple in ipairs(config_tuples) do
      local full_config_name = get_full_config_name(tuple)
      -- A config counts as mac-native when macosx is among its supported
      -- hosts or is its default host.
      local is_macosx_native = false
      for _, host in util.nil_ipairs(tuple.Config.SupportedHosts) do
        if host == "macosx" then
          is_macosx_native = true
        end
      end
      if "macosx" == tuple.Config.DefaultOnHost then
        is_macosx_native = true
      end
      local config_id = newid(project.Decl.Name .. full_config_name)
      p:write('\t\t', config_id, ' = {\n')
      p:write('\t\t\tisa = XCBuildConfiguration;\n')
      -- Don't add anything extra if subvariant is default
      p:write('\t\t\tbuildSettings = {\n')
      if is_macosx_native then
        p:write('\t\t\t\tARCHS = "$(NATIVE_ARCH_ACTUAL)";\n')
      end
      -- These settings feed the $(CONFIG)-$(VARIANT)-$(SUBVARIANT) shell
      -- script invocation written in write_shellscripts.
      p:write('\t\t\t\tVARIANT = "', tuple.Variant.Name, '";\n')
      p:write('\t\t\t\tCONFIG = "', tuple.Config.Name, '";\n')
      p:write('\t\t\t\tSUBVARIANT = "', tuple.SubVariant, '";\n')
      if is_macosx_native and not project.IsMeta then
        p:write('\t\t\t\tCONFIGURATION_BUILD_DIR = "', full_config_name, '";\n')
      end
      -- this is a little hack to get xcode to clean the whole output folder when using "FullBuild"
      p:write('\t\t\t\tPRODUCT_NAME = "',project.Decl.Name , '";\n')
      p:write('\t\t\t\tTARGET_NAME = "',project.Decl.Name , '";\n')
      -- NOTE(review): os.getenv(var) returns nil for unset variables, which
      -- would make this write() error -- confirm callers guarantee the vars
      -- in hints.EnvVars are always set.
      for i, var in ipairs(set_env) do
        p:write('\t\t\t\t', var, ' = "', os.getenv(var), '";\n')
      end
      p:write('\t\t\t};\n')
      p:write('\t\t\tname = "',full_config_name , '";\n')
      p:write('\t\t};\n')
    end
  end
  -- PBXProject configurations
  for _, tuple in ipairs(config_tuples) do
    local full_config_name = get_full_config_name(tuple)
    local config_id = newid("ProjectObject" .. full_config_name)
    p:write('\t\t', config_id, ' = {\n')
    p:write('\t\t\tisa = XCBuildConfiguration;\n')
    p:write('\t\t\tbuildSettings = {\n')
    p:write('\t\t\t};\n')
    p:write('\t\t\tname = "',full_config_name , '";\n')
    p:write('\t\t};\n')
  end
  p:write('/* End XCBuildConfiguration section */\n')
end
-- Write the XCConfigurationList section: one list per project plus the
-- project-level list, each referencing the configurations emitted by
-- write_configs and naming a default configuration.
local function write_config_list(p, projects, config_tuples)
  p:write('/* Begin XCConfigurationList section */\n')
  local default_config = "";
  -- find the default config: prefer a mac-native debug tuple.
  for _, tuple in ipairs(config_tuples) do
    -- NOTE(review): '%m' is not a pattern class, so Lua treats it as the
    -- literal 'm' -- the pattern is effectively '^(macosx)%-'.  It works,
    -- but the stray '%' looks accidental; confirm intent.
    local is_macosx_native = tuple.Config.Name:match('^(%macosx)%-')
    if is_macosx_native and tuple.Variant.Name == "debug" then
      default_config = get_full_config_name(tuple)
      break
    end
  end
  -- if we didn't find a default config just grab the first one
  if default_config == "" then
    default_config = get_full_config_name(config_tuples[1])
  end
  for __, project in ipairs(projects) do
    local config_id = newid(project.Decl.Name .. 'Config')
    p:write('\t\t', config_id, ' /* Build config list for "', project.Decl.Name, '" */ = {\n')
    p:write('\t\t\tisa = XCConfigurationList;\n')
    -- Don't add anything extra if subvariant is default
    p:write('\t\t\tbuildConfigurations = (\n')
    for _, tuple in ipairs(config_tuples) do
      local full_config_name = get_full_config_name(tuple)
      p:write(string.format('\t\t\t\t%s /* %s */,\n', newid(project.Decl.Name .. full_config_name), full_config_name))
    end
    p:write('\t\t\t);\n')
    p:write('\t\t\tdefaultConfigurationIsVisible = 1;\n')
    p:write('\t\t\tdefaultConfigurationName = "', default_config, '";\n')
    p:write('\t\t};\n')
  end
  -- PBXProject configuration list
  local config_id = newid("ProjectObjectConfigList")
  p:write('\t\t', config_id, ' /* Build config list for PBXProject */ = {\n')
  p:write('\t\t\tisa = XCConfigurationList;\n')
  -- Don't add anything extra if subvariant is default
  p:write('\t\t\tbuildConfigurations = (\n')
  for _, tuple in ipairs(config_tuples) do
    local full_config_name = get_full_config_name(tuple)
    p:write(string.format('\t\t\t\t%s /* %s */,\n', newid("ProjectObject" .. full_config_name), full_config_name))
  end
  p:write('\t\t\t);\n')
  p:write('\t\t\tdefaultConfigurationIsVisible = 1;\n')
  p:write('\t\t\tdefaultConfigurationName = "', default_config, '";\n')
  p:write('\t\t};\n')
  p:write('/* End XCConfigurationList section */\n')
end
-- Close the objects dictionary and point rootObject at the project object
-- written by write_project.
local function write_footer(p)
  p:write('\t};\n')
  p:write('\trootObject = ', newid("ProjectObject"), ' /* Project object */;\n')
  p:write('}\n')
end
-- Build the two meta projects (!BuildWorkspace / !UpdateWorkspace) that
-- invoke tundra itself.
-- NOTE(review): generate_files below constructs near-identical tables
-- inline instead of calling this, and with different BuildArgs (no "-v")
-- -- this function may be dead code; confirm before consolidating.
local function make_meta_projects(ide_script)
  -- The meta projects show tundra.lua (and units.lua, when present) as
  -- their sources for easy editing from within the IDE.
  local source_list = {
    [newid("tundra.lua")] = "tundra.lua"
  }
  local units = io.open("units.lua")
  if units then
    source_list[newid("units.lua")] = "units.lua"
    io.close(units)
  end
  local meta_name1 = "!BuildWorkspace"
  local meta_name2 = "!UpdateWorkspace"
  return {
    {
      Decl = { Name = meta_name1, },
      Type = "LegacyTarget",
      RelativeFilename = "",
      Sources = source_list,
      Guid = newid(meta_name1 .. 'ProjectId'),
      IsMeta = true,
      MetaData = { BuildArgs = "-v $(CONFIG)-$(VARIANT)-$(SUBVARIANT)", BuildTool = TundraExePath },
    },
    {
      Decl = { Name = meta_name2, },
      Type = "LegacyTarget",
      RelativeFilename = "",
      Sources = source_list,
      Guid = newid(meta_name2 .. 'ProjectId'),
      IsMeta = true,
      MetaData = { BuildArgs = "--g " .. ide_script, BuildTool = TundraExePath },
    }
  }
end
-- Entry point for Xcode project generation: collects projects from the DAG,
-- adds the two workspace meta targets, and writes one .xcodeproj per entry
-- in IdeGenerationHints.Xcode.Projects (or a default one).
function xcode_generator:generate_files(ngen, config_tuples, raw_nodes, env, default_names, hints, ide_script)
  assert(config_tuples and #config_tuples > 0)
  hints = hints or {}
  hints = hints.Xcode or {}
  local base_dir = hints.BaseDir and (hints.BaseDir .. '/') or env:interpolate('$(OBJECTROOT)$(SEP)')
  native.mkdir(base_dir)
  local projects = get_projects(raw_nodes, env, hints, ide_script)
  -- Meta targets list tundra.lua (and units.lua if present) as sources so
  -- they can be edited from inside the IDE.
  local source_list = {
    [newid("tundra.lua")] = "tundra.lua"
  }
  local units = io.open("units.lua")
  if units then
    source_list[newid("units.lua")] = "units.lua"
    io.close(units)
  end
  -- "!BuildWorkspace": legacy target that shells out to tundra to build.
  local meta_name = "!BuildWorkspace"
  local build_project = {
    Decl = { Name = meta_name, },
    Type = "LegacyTarget",
    RelativeFilename = "",
    Sources = source_list,
    Guid = newid(meta_name .. 'ProjectId'),
    IsMeta = true,
    MetaData = { BuildArgs = "$(CONFIG)-$(VARIANT)-$(SUBVARIANT)", BuildTool = TundraExePath },
  }
  -- "!UpdateWorkspace": legacy target that regenerates the IDE files.
  local meta_name = "!UpdateWorkspace"
  local generate_project = {
    Decl = { Name = meta_name, },
    Type = "LegacyTarget",
    RelativeFilename = "",
    Sources = source_list,
    Guid = newid(meta_name .. 'ProjectId'),
    IsMeta = true,
    MetaData = { BuildArgs = "--g " .. ide_script, BuildTool = TundraExePath },
  }
  local solution_hints = hints.Projects
  if not solution_hints then
    print("No IdeGenerationHints.Xcode.Projects specified - using defaults")
    solution_hints = {
      ['tundra-generated.sln'] = { }
    }
  end
  for name, data in pairs(solution_hints) do
    local sln_projects = { build_project, generate_project }
    if data.Projects then
      for _, pname in ipairs(data.Projects) do
        local pp = projects[pname]
        if not pp then
          errorf("can't find project %s for inclusion in %s -- check your Projects data", pname, name)
        end
        sln_projects[#sln_projects + 1] = pp
      end
    else
      -- All the projects (that are not meta)
      for pname, pp in pairs(projects) do
        sln_projects[#sln_projects + 1] = pp
      end
    end
    local proj_dir = base_dir .. path.drop_suffix(name) .. ".xcodeproj/"
    native.mkdir(proj_dir)
    -- BUG FIX: the io.open result was never checked (a failure would crash
    -- with an unhelpful nil-index error) and the handle was never closed,
    -- leaking it and relying on GC to flush the file.
    local p = assert(io.open(path.join(proj_dir, "project.pbxproj"), 'wb'))
    write_header(p)
    write_file_refs(p, sln_projects)
    write_groups(p, sln_projects)
    write_legacy_targets(p, sln_projects, env)
    write_native_targes(p, sln_projects)
    write_project(p, sln_projects)
    write_shellscripts(p, sln_projects, env)
    write_configs(p, sln_projects, config_tuples, env, hints.EnvVars or {})
    write_config_list(p, sln_projects, config_tuples)
    write_footer(p)
    p:close()
  end
end
-- Register this module as the active IDE backend; the callback instantiates
-- a generator object and forwards all arguments to generate_files.
nodegen.set_ide_backend(function(...)
  local state = setmetatable({}, xcode_generator)
  state:generate_files(...)
end)

View File

@ -0,0 +1,34 @@
-- NOTE(review): module() is deprecated in Lua 5.2+; kept here because the
-- rest of the codebase uses the same idiom.
module(..., package.seeall)

-- Template written out as the initial tundra.lua when bootstrapping a new
-- project: three default configs (generic gcc, macosx gcc, win64 msvc) and
-- a single glob-based C program unit.  The [====[ ]====] long brackets keep
-- the template inert until written to disk.
init_tundra_lua = [====[
local CFiles = { ".c", ".h" }
Build {
Configs = {
Config {
Name = "generic-gcc",
DefaultOnHost = "linux",
Tools = { "gcc" },
},
Config {
Name = "macosx-gcc",
DefaultOnHost = "macosx",
Tools = { "gcc-osx" },
},
Config {
Name = "win64-msvc",
DefaultOnHost = "windows",
Tools = { "msvc-vs2008"; TargetPlatform = "x64" },
},
},
Units = function()
require "tundra.syntax.glob"
Program {
Name = "a.out",
Sources = { Glob { Dir = ".", Extensions = CFiles } },
}
Default "a.out"
end,
}
]====]

View File

@ -0,0 +1,914 @@
module(..., package.seeall)
local unitgen = require "tundra.unitgen"
local util = require "tundra.util"
local path = require "tundra.path"
local depgraph = require "tundra.depgraph"
local buildfile = require "tundra.buildfile"
local native = require "tundra.native"
-- Registered IDE backend callback -- presumably set via a set_ide_backend
-- function elsewhere in this module (not visible in this chunk).
local ide_backend = nil
-- Generator state for the run in progress; set/cleared by _generate_dag.
local current = nil
-- Base class for all unit generators ("Program", "StaticLibrary", ...).
local _nodegen = { }
_nodegen.__index = _nodegen
-- Raise a structured build-script error.  The raised value is a table with
-- Class = 'syntax error' and a formatted Message, which the build-file
-- error catcher recognizes.
local function syntax_error(msg, ...)
  local report = {
    Class = 'syntax error',
    Message = string.format(msg, ...),
  }
  error(report)
end
-- Validate a blueprint value of Type "boolean"; raises a syntax error for
-- any other Lua type and returns the value unchanged otherwise.
local function validate_boolean(name, value)
  if type(value) ~= "boolean" then
    syntax_error("%s: expected boolean value, got %q", name, type(value))
  end
  return value
end
-- Validate a blueprint value of Type "string"; raises a syntax error for
-- any other Lua type and returns the value unchanged otherwise.
local function validate_string(name, value)
  if type(value) ~= "string" then
    syntax_error("%s: expected string value, got %q", name, type(value))
  end
  return value
end
-- Validate a blueprint value of Type "pass": must be the pass name as a
-- string (resolution to a pass object happens later in x_pass).
local function validate_pass(name, value)
  if type(value) ~= "string" then
    syntax_error("%s: expected pass name, got %q", name, type(value))
  end
  return value
end
-- Validate a blueprint value of Type "table".  As a convenience a bare
-- string is accepted and wrapped in a single-element table.
local function validate_table(name, value)
  local kind = type(value)
  if kind == "table" then
    return value
  end
  if kind == "string" then
    -- A single string can be converted into a table value very easily
    return { value }
  end
  syntax_error("%s: expected table value, got %q", name, kind)
end
-- Validate a blueprint value of Type "config": either one pattern string
-- or a table of patterns is accepted.
local function validate_config(name, value)
  local kind = type(value)
  if kind ~= "table" and kind ~= "string" then
    syntax_error("%s: expected config, got %q", name, kind)
  end
  return value
end
-- Maps blueprint Type names to their validation function (run at script
-- evaluation time, before any DAG work happens).
local validators = {
  ["string"] = validate_string,
  ["pass"] = validate_pass,
  ["table"] = validate_table,
  ["filter_table"] = validate_table,
  ["source_list"] = validate_table,
  ["boolean"] = validate_boolean,
  ["config"] = validate_config,
}
-- Validate self.Decl against self.Blueprint: required keys must be present,
-- every present value is run through its type validator (which may coerce
-- it, e.g. string -> table), and unknown keys are rejected.
function _nodegen:validate()
  local decl = self.Decl
  for name, detail in pairs(assert(self.Blueprint)) do
    local val = decl[name]
    -- BUG FIX: this used 'if not val', which treats a legitimate boolean
    -- 'false' (Type = "boolean" accepts it) as a missing argument.  Test
    -- explicitly against nil instead.
    if val == nil then
      if detail.Required then
        syntax_error("%s: missing argument: '%s'", self.Keyword, name)
      end
      -- ok, optional value
    else
      local validator = validators[detail.Type]
      decl[name] = validator(name, val)
    end
  end
  -- Reject any declaration keys the blueprint doesn't know about.
  for name, detail in pairs(decl) do
    if not self.Blueprint[name] then
      syntax_error("%s: unsupported argument: '%s'", self.Keyword, name)
    end
  end
end
-- Hook for subclasses to adjust 'env' using the raw declaration data before
-- DAG creation.  The base implementation intentionally does nothing.
function _nodegen:customize_env(env, raw_data)
  -- available for subclasses
end
-- Populate this unit's environment from its own declaration plus any
-- 'Propagate' blocks contributed by its dependencies.  All values are
-- Config-filtered against the env's BUILD_ID via flatten_list.
function _nodegen:configure_env(env, deps)
  local build_id = env:get('BUILD_ID')
  local propagate_blocks = {}
  local decl = self.Decl
  -- Collect Propagate blocks from direct dependencies.
  for _, dep_obj in util.nil_ipairs(deps) do
    local data = dep_obj.Decl.Propagate
    if data then
      propagate_blocks[#propagate_blocks + 1] = data
    end
  end
  -- Append each config-filtered value in 'data' to env key 'env_key'.
  local function push_bindings(env_key, data)
    if data then
      for _, item in util.nil_ipairs(flatten_list(build_id, data)) do
        env:append(env_key, item)
      end
    end
  end
  -- Like push_bindings, except the first value replaces the key's current
  -- contents; subsequent values append.
  local function replace_bindings(env_key, data)
    if data then
      local first = true
      for _, item in util.nil_ipairs(flatten_list(build_id, data)) do
        if first then
          env:replace(env_key, item)
          first = false
        else
          env:append(env_key, item)
        end
      end
    end
  end
  -- Push Libs, Defines and so on into the environment of this unit.
  -- These are named for convenience but are aliases for syntax niceness.
  for decl_key, env_key in util.nil_pairs(self.DeclToEnvMappings) do
    -- First pick settings from our own unit.
    push_bindings(env_key, decl[decl_key])
    -- Then whatever the dependencies propagate for the same key.
    for _, data in ipairs(propagate_blocks) do
      push_bindings(env_key, data[decl_key])
    end
  end
  -- Push Env blocks as is
  for k, v in util.nil_pairs(decl.Env) do
    push_bindings(k, v)
  end
  for k, v in util.nil_pairs(decl.ReplaceEnv) do
    replace_bindings(k, v)
  end
  -- Finally apply Env/ReplaceEnv blocks propagated from dependencies.
  for _, block in util.nil_ipairs(propagate_blocks) do
    for k, v in util.nil_pairs(block.Env) do
      push_bindings(k, v)
    end
    for k, v in util.nil_pairs(block.ReplaceEnv) do
      replace_bindings(k, v)
    end
  end
end
-- Recursively expand a Sources-style list into 'accum'.  Entries may be
-- native DAG handles (userdata), DAG nodes, nodegen objects (expanded via
-- get_dag), nested lists (with optional SourceDir rebasing), or filename
-- strings (made relative to base_dir unless absolute).  Returns 'accum'.
local function resolve_sources(env, items, accum, base_dir)
  -- Extensions in IGNORED_AUTOEXTS are silently dropped from the input.
  local ignored_exts = util.make_lookup_table(env:get_list("IGNORED_AUTOEXTS", {}))
  for _, item in util.nil_ipairs(items) do
    local type_name = type(item)
    assert(type_name ~= "function")
    if type_name == "userdata" then
      -- Native DAG node handle; keep as-is.
      accum[#accum + 1] = item
    elseif type_name == "table" then
      if depgraph.is_node(item) then
        accum[#accum + 1] = item
      elseif getmetatable(item) then
        -- Nodegen object: expand to its DAG node under this env.
        accum[#accum + 1] = item:get_dag(env)
      else
        -- Plain nested list; an inner SourceDir rebases its contents.
        resolve_sources(env, item, accum, item.SourceDir or base_dir)
      end
    else
      assert(type_name == "string")
      local ext = path.get_extension(item)
      if not ignored_exts[ext] then
        if not base_dir or path.is_absolute(item) then
          accum[#accum + 1] = item
        else
          local p = path.join(base_dir, item)
          accum[#accum + 1] = p
        end
      end
    end
  end
  return accum
end
-- Analyze source list, returning list of input files and list of dependencies.
--
-- This is so you can pass a mix of actions producing files and regular
-- filenames as inputs to the next step in the chain and the output files of
-- such nodes will be used automatically.
--
-- list - list of source files and nodes that produce source files
-- suffixes - acceptable source suffixes to pick up from nodes in source list
local function analyze_sources(env, pass, list, suffixes)
  if not list then
    return nil
  end
  list = util.flatten(list)
  local deps = {}
  -- Ask the environment for an implicit build step for 'source_file'
  -- (e.g. a code generator keyed on the file's extension); returns the
  -- created node, the table itself if already a node, or nil.
  local function implicit_make(source_file)
    local t = type(source_file)
    if t == "table" then
      return source_file
    end
    assert(t == "string")
    local make = env:get_implicit_make_fn(source_file)
    if make then
      return make(env, pass, source_file)
    else
      return nil
    end
  end
  -- Either record 'fn' as a plain input, or turn it into an implicit node:
  -- the node joins 'deps' and its matching output files join 'output'.
  local function transform(output, fn)
    if type(fn) ~= "string" then
      error(util.tostring(fn) .. " is not a string", 2)
    end
    local t = implicit_make(fn)
    if t then
      deps[#deps + 1] = t
      t:insert_output_files(output, suffixes)
    else
      output[#output + 1] = fn
    end
  end
  -- Seed: existing DAG nodes become deps (contributing their outputs);
  -- strings go into the working file list.
  local files = {}
  for _, src in ipairs(list) do
    if depgraph.is_node(src) then
      deps[#deps + 1] = src
      src:insert_output_files(files, suffixes)
    elseif type(src) == "table" then
      error("non-DAG node in source list at this point")
    else
      files[#files + 1] = src
    end
  end
  -- Fixed-point expansion: keep transforming until a pass adds no new
  -- implicit deps (outputs of implicit steps may themselves need steps).
  while true do
    local result = {}
    local old_dep_count = #deps
    for _, src in ipairs(files) do
      transform(result, src)
    end
    files = result
    if #deps == old_dep_count then
      --print("scan", util.tostring(list), util.tostring(suffixes), util.tostring(result))
      return result, deps
    end
  end
end
-- Transformer for decl value types used as-is (they were already checked
-- by the validators at script-evaluation time).
local function x_identity(self, name, info, value, env, out_deps)
  return value
end
-- Transformer for "source_list" decl values: config-filter the structure,
-- resolve entries relative to Decl.SourceDir, then expand implicit build
-- steps.  Discovered implicit nodes are appended to out_deps.
local function x_source_list(self, name, info, value, env, out_deps)
  local build_id = env:get('BUILD_ID')
  local source_files
  if build_id then
    source_files = filter_structure(build_id, value)
  else
    source_files = value
  end
  local sources = resolve_sources(env, source_files, {}, self.Decl.SourceDir)
  -- info.ExtensionKey names the env list of accepted source extensions.
  local source_exts = env:get_list(info.ExtensionKey)
  local inputs, ideps = analyze_sources(env, resolve_pass(self.Decl.Pass), sources, source_exts)
  if ideps then
    util.append_table(out_deps, ideps)
  end
  return inputs
end
-- Transformer for "filter_table" decl values: apply Config filters against
-- the current build id and flatten the nesting to a plain list.
local function x_filter_table(self, name, info, value, env, out_deps)
  local build_id = env:get('BUILD_ID')
  return flatten_list(build_id, value)
end
-- Resolve a target reference -- either a unit name string or a nodegen
-- object -- to its DAG node under the current run's default environment.
local function find_named_node(name_or_dag)
  if type(name_or_dag) == "table" then
    return name_or_dag:get_dag(current.default_env)
  elseif type(name_or_dag) == "string" then
    local generator = current.units[name_or_dag]
    if not generator then
      errorf("unknown node specified: %q", tostring(name_or_dag))
    end
    return generator:get_dag(current.default_env)
  else
    errorf("illegal node specified: %q", tostring(name_or_dag))
  end
end
-- Special resolver for dependencies in a nested (config-filtered) list.
-- Returns a list of nodegen objects: string entries are looked up in the
-- current unit map, existing nodegen objects pass through unchanged.
local function resolve_dependencies(decl, raw_deps, env)
  if not raw_deps then
    return {}
  end
  local build_id = env:get('BUILD_ID')
  local deps = flatten_list(build_id, raw_deps)
  return util.map_in_place(deps, function (i)
    if type(i) == "string" then
      local n = current.units[i]
      if not n then
        errorf("%s: Unknown 'Depends' target %q", decl.Name, i)
      end
      return n
    elseif type(i) == "table" and getmetatable(i) and i.Decl then
      -- Already a nodegen object.
      return i
    else
      errorf("bad 'Depends' value of type %q", type(i))
    end
  end)
end
-- Transformer for "pass" decl values: resolve the pass name string to the
-- pass object of the current run.
local function x_pass(self, name, info, value, env, out_deps)
  return resolve_pass(value)
end
-- Maps blueprint Type names to the transform applied by create_input_data()
-- when turning a declaration into DAG-creation input.
local decl_transformers = {
  -- the x_identity data types have already been checked at script time through validate_xxx
  ["string"] = x_identity,
  ["table"] = x_identity,
  ["config"] = x_identity,
  ["boolean"] = x_identity,
  ["pass"] = x_pass,
  ["source_list"] = x_source_list,
  ["filter_table"] = x_filter_table,
}
-- Create input data for the generator's DAG creation function based on the
-- blueprint passed in when the generator was registered. This is done here
-- centrally rather than in all the different node generators to reduce code
-- duplication and keep the generators minimal. If you need to do something
-- special, you can override create_input_data() in your subclass.
function _nodegen:create_input_data(env)
  local decl = self.Decl
  local data = {}  -- transformed decl values, keyed by blueprint name
  local deps = {}  -- implicit dependency nodes found while transforming
  for name, detail in pairs(assert(self.Blueprint)) do
    local val = decl[name]
    -- BUG FIX: this used 'if val then', which silently dropped a declared
    -- boolean 'false' (Type = "boolean") from the input data.  Compare
    -- against nil explicitly so false values are transformed and kept.
    if val ~= nil then
      local xform = decl_transformers[detail.Type]
      data[name] = xform(self, name, detail, val, env, deps)
    end
  end
  return data, deps
end
-- NOTE(review): looks like an unfinished stub -- the nil-name early-out is
-- the only statement, so this returns nil for every input.  Pass lookup is
-- actually performed by resolve_pass() below; confirm before removing.
function get_pass(self, name)
  if not name then
    return nil
  end
end
-- Memoized translation of a user Config filter (e.g. "macosx-*-*") into an
-- anchored Lua pattern matching full "<platform>-<tool>-<variant>-<subvariant>"
-- build ids.  '*' wildcards expand to the [%w_]+ component class.
local pattern_cache = {}
local function get_cached_pattern(p)
  local cached = pattern_cache[p]
  if cached then
    return cached
  end
  local component = '[%w_]+'
  local expanded = p:gsub('*', '[%%w_]+')
  -- Missing trailing components are filled in with the wildcard class.
  local platform, tool, variant, subvariant = unitgen.match_build_id(expanded, component)
  local compiled = string.format('^%s%%-%s%%-%s%%-%s$', platform, tool, variant, subvariant)
  pattern_cache[p] = compiled
  return compiled
end
-- Test whether a unit's Config filter accepts 'build_id'.  A nil pattern
-- matches everything; a string is matched as a wildcard pattern; a table
-- matches when any of its elements does.  Note the string case returns the
-- raw match result (string or nil) -- callers treat it as a truth value.
local function config_matches(pattern, build_id)
  local ptype = type(pattern)
  if ptype == "nil" then
    return true
  elseif ptype == "string" then
    return build_id:match(get_cached_pattern(pattern))
  elseif ptype == "table" then
    for _, sub_pattern in ipairs(pattern) do
      if config_matches(sub_pattern, build_id) then
        return true
      end
    end
    return false
  else
    error("bad 'Config' pattern type: " .. ptype)
  end
end
-- Select and clone the base environment for 'unit', honoring its SubConfig
-- tag (cross-compilation support); falls back to the run's default env.
local function make_unit_env(unit)
  -- Select an environment for this unit based on its SubConfig tag
  -- to support cross compilation.
  local env
  local subconfig = unit.Decl.SubConfig or current.default_subconfig
  if subconfig and current.base_envs then
    env = current.base_envs[subconfig]
    -- NOTE(review): the errorf below only fires when VeryVerbose is set,
    -- so a missing subconfig env passes silently in normal runs and falls
    -- through to the default env -- confirm that's intended.
    if Options.VeryVerbose then
      if env then
        printf("%s: using subconfig %s (%s)", unit.Decl.Name, subconfig, env:get('BUILD_ID'))
      else
        if current.default_subconfig then
          errorf("%s: couldn't find a subconfig env", unit.Decl.Name)
        else
          printf("%s: no subconfig %s found; using default env", unit.Decl.Name, subconfig)
        end
      end
    end
  end
  if not env then
    env = current.default_env
  end
  -- Clone so per-unit changes don't leak into the shared base env.
  return env:clone()
end
-- Counter used to give config-filtered dummy nodes unique labels.
local anon_count = 1

-- Return the DAG node for this unit under parent_env's BUILD_ID, creating
-- and caching it on first request.  Units filtered out by their Config
-- attribute get an empty placeholder node instead.
function _nodegen:get_dag(parent_env)
  local build_id = parent_env:get('BUILD_ID')
  local dag = self.DagCache[build_id]
  if not dag then
    if build_id:len() > 0 and not config_matches(self.Decl.Config, build_id) then
      -- Unit has been filtered out via Config attribute.
      -- Create a fresh dummy node for it.
      local name
      if not self.Decl.Name then
        name = string.format("Dummy node %d", anon_count)
      else
        name = string.format("Dummy node %d for %s", anon_count, self.Decl.Name)
      end
      anon_count = anon_count + 1
      dag = depgraph.make_node {
        Env = parent_env,
        Pass = resolve_pass(self.Decl.Pass),
        Label = name,
      }
    else
      local unit_env = make_unit_env(self)
      if self.Decl.Name then
        unit_env:set('UNIT_PREFIX', '__' .. self.Decl.Name)
      end
      -- Before accessing the unit's dependencies, resolve them via filtering.
      local deps = resolve_dependencies(self.Decl, self.Decl.Depends, unit_env)
      self:configure_env(unit_env, deps)
      self:customize_env(unit_env, self.Decl, deps)
      local input_data, input_deps = self:create_input_data(unit_env, parent_env)
      -- Copy over dependencies which have been pre-resolved
      input_data.Depends = deps
      -- Recursively materialize dependency DAG nodes (cached per build id).
      for _, dep in util.nil_ipairs(deps) do
        input_deps[#input_deps + 1] = dep:get_dag(parent_env)
      end
      dag = self:create_dag(unit_env, input_data, input_deps, parent_env)
      if not dag then
        error("create_dag didn't generate a result node")
      end
    end
    self.DagCache[build_id] = dag
  end
  return dag
end
-- Class table for per-run generator state.  Evaluators maps unit keywords
-- (e.g. "Program") to their registered nodegen metatables.
local _generator = {
  Evaluators = {},
}
_generator.__index = _generator

-- Wrap 's' (or a fresh table) as generator state with an empty unit map.
local function new_generator(s)
  local state = s or {}
  state.units = {}
  return setmetatable(state, _generator)
end
-- Populate state.units with a Name -> declaration-object mapping for every
-- named unit in 'raw_nodes'; duplicate names are a fatal error and unnamed
-- units are skipped.
local function create_unit_map(state, raw_nodes)
  for _, unit in ipairs(raw_nodes) do
    assert(unit.Decl)
    local name = unit.Decl.Name
    if type(name) == "string" then
      if state.units[name] then
        errorf("duplicate unit name: %s", name)
      end
      state.units[name] = unit
    end
  end
end
-- Internal worker for generate_dag: sets up per-run state, builds the unit
-- map, picks the default environment, and materializes DAG nodes for the
-- always/default/named node sets.  Returns { always, default, named }.
-- The module-local 'current' is set for the duration of the run so helpers
-- like find_named_node and resolve_pass can reach the state.
function _generate_dag(args)
  local envs = assert(args.Envs)
  local raw_nodes = assert(args.Declarations)
  local state = new_generator {
    base_envs = envs,
    root_env = envs["__default"], -- the outmost config's env in a cross-compilation scenario
    config = assert(args.Config),
    variant = assert(args.Variant),
    passes = assert(args.Passes),
  }
  current = state
  create_unit_map(state, raw_nodes)
  local subconfigs = state.config.SubConfigs
  -- Pick a default environment which is used for
  -- 1. Nodes without a SubConfig declaration
  -- 2. Nodes with a missing SubConfig declaration
  -- 3. All nodes if there are no SubConfigs set for the current config
  if subconfigs then
    state.default_subconfig = assert(state.config.DefaultSubConfig)
    state.default_env = assert(envs[state.default_subconfig], "unknown DefaultSubConfig specified")
  else
    state.default_env = assert(envs["__default"])
  end
  -- (Cleanup: removed two lookup tables built from AlwaysNodes/DefaultNodes
  -- that were never read.)
  local always_nodes = util.map(args.AlwaysNodes, find_named_node)
  local default_nodes = util.map(args.DefaultNodes, find_named_node)
  local named_nodes = {}
  for name, _ in pairs(state.units) do
    named_nodes[name] = find_named_node(name)
  end
  current = nil
  return { always_nodes, default_nodes, named_nodes }
end
-- Public wrapper around _generate_dag: runs it under xpcall so structured
-- 'syntax error' values raised by build scripts are formatted by the
-- buildfile catcher; any failure is reported fatally via croak.
function generate_dag(args)
  local success, result = xpcall(function () return _generate_dag(args) end, buildfile.syntax_error_catcher)
  if success then
    -- Unpack { always_nodes, default_nodes, named_nodes }.
    return result[1], result[2], result[3]
  else
    croak("%s", result)
  end
end
-- Map a pass name to the pass object of the generator run in progress.
-- A nil name is allowed (no pass specified) and yields nil; an unknown
-- name is a build-script syntax error.  Only valid while 'current' is set.
function resolve_pass(name)
  assert(current)
  if name then
    local p = current.passes[name]
    if not p then
      syntax_error("%q is not a valid pass name", name)
    end
    return p
  else
    return nil
  end
end
-- Compute the output path for a unit: an explicit Target wins, otherwise
-- the path is derived as $(OBJECTDIR)/<prefix><Name><suffix>.
function get_target(data, suffix, prefix)
  if data.Target then
    return data.Target
  end
  assert(data.Name)
  return "$(OBJECTDIR)/" .. (prefix or "") .. data.Name .. (suffix or "")
end
-- Look up the evaluator metatable registered under 'name' (nil if absent).
function get_evaluator(name)
  return _generator.Evaluators[name]
end

-- True when 'name' is a registered evaluator keyword.
function is_evaluator(name)
  return _generator.Evaluators[name] ~= nil
end
-- Blueprint entries merged into every evaluator's blueprint by
-- add_evaluator(); the per-field Help strings document each key.
local common_blueprint = {
  Propagate = {
    Help = "Declarations to propagate to dependent units",
    Type = "filter_table",
  },
  Depends = {
    Help = "Dependencies for this node",
    Type = "table", -- handled specially
  },
  Env = {
    Help = "Data to append to the environment for the unit",
    Type = "filter_table",
  },
  ReplaceEnv = {
    Help = "Data to replace in the environment for the unit",
    Type = "filter_table",
  },
  Pass = {
    Help = "Specify build pass",
    Type = "pass",
  },
  SourceDir = {
    Help = "Specify base directory for source files",
    Type = "string",
  },
  Config = {
    Help = "Specify configuration this unit will build in",
    Type = "config",
  },
  SubConfig = {
    Help = "Specify sub-configuration this unit will build in",
    Type = "config",
  },
  __DagNodes = {
    Help = "Internal node to keep track of DAG nodes generated so far",
    Type = "table",
  }
}
-- Turn 'meta_tbl' into an evaluator class inheriting from 'base'
-- (defaulting to the _nodegen base class) and return it.
function create_eval_subclass(meta_tbl, base)
  setmetatable(meta_tbl, base or _nodegen)
  meta_tbl.__index = meta_tbl
  return meta_tbl
end
-- Register a unit evaluator (e.g. "Program"): wires up inheritance from
-- _nodegen, merges the common blueprint entries, expands DeclToEnvMappings
-- shortcuts into blueprint fields, sanity-checks every entry, and stores
-- the evaluator under its triggering keyword.
function add_evaluator(name, meta_tbl, blueprint)
  assert(type(name) == "string")
  assert(type(meta_tbl) == "table")
  assert(type(blueprint) == "table")
  -- Set up this metatable as a subclass of _nodegen unless it is already
  -- configured.
  if not getmetatable(meta_tbl) then
    setmetatable(meta_tbl, _nodegen)
    meta_tbl.__index = meta_tbl
  end
  -- Install common blueprint items (loop 'name' shadows the keyword param
  -- on purpose here and below; the param is used again after the loops).
  for name, val in pairs(common_blueprint) do
    if not blueprint[name] then
      blueprint[name] = val
    end
  end
  -- Expand environment shortcuts into options.
  for decl_key, env_key in util.nil_pairs(meta_tbl.DeclToEnvMappings) do
    blueprint[decl_key] = {
      Type = "filter_table",
      Help = "Shortcut for environment key " .. env_key,
    }
  end
  -- Validate the assembled blueprint itself.
  for name, val in pairs(blueprint) do
    local type_ = assert(val.Type)
    if not validators[type_] then
      errorf("unsupported blueprint type %q", type_)
    end
    if val.Type == "source_list" and not val.ExtensionKey then
      errorf("%s: source_list must provide ExtensionKey", name)
    end
  end
  -- Record blueprint for use when validating user constructs.
  meta_tbl.Keyword = name
  meta_tbl.Blueprint = blueprint
  -- Store this evaluator under the keyword that will trigger it.
  _generator.Evaluators[name] = meta_tbl
end
-- Called when processing build scripts; eval_keyword is something previously
-- registered as an evaluator here.  Instantiates a nodegen object for the
-- declaration data and validates it against the evaluator's blueprint.
function evaluate(eval_keyword, data)
  local meta_tbl = assert(_generator.Evaluators[eval_keyword])
  -- Give the evaluator a chance to fix up the data before we validate it.
  data = meta_tbl:preprocess_data(data)
  local object = setmetatable({
    DagCache = {}, -- maps BUILD_ID -> dag node
    Decl = data
  }, meta_tbl)
  -- Expose the dag cache to the raw input data so the IDE generator can find it later
  data.__DagNodes = object.DagCache
  object.__index = object
  -- Validate data according to Blueprint settings
  object:validate()
  return object
end
-- Given a list of strings or nested lists, flatten the structure to a single
-- list of strings while applying configuration filters. Configuration filters
-- match against the current build identifier like this:
--
-- { "a", "b", { "nixfile1", "nixfile2"; Config = "unix-*-*" }, "bar", { "debugfile"; Config = "*-*-debug" }, }
--
-- If 'exclusive' is set, then:
-- If 'build_id' is set, only values _with_ a 'Config' filter are included.
-- If 'build_id' is nil, only values _without_ a 'Config' filter are included.
function flatten_list(build_id, list, exclusive)
  if not list then return nil end
  local filtering = build_id ~= nil
  local accum = {}
  -- Depth-first walk; 'inside_filter' tracks whether any enclosing table
  -- carried a Config attribute.
  local function visit(node, inside_filter)
    if type(node) == "table" and not getmetatable(node) then
      if node.Config then inside_filter = true end
      if not filtering or config_matches(node.Config, build_id) then
        for _, child in ipairs(node) do
          visit(child, inside_filter)
        end
      end
    elseif not exclusive or (inside_filter == filtering) then
      accum[#accum + 1] = node
    end
  end
  visit(list, false)
  return accum
end
-- Conceptually similar to flatten_list(), but retains table structure.
-- Use to keep source tables as they are passed in, to retain nested
-- SourceDir attributes.
local empty_leaf = {} -- shared constant standing in for filtered-out subtrees
function filter_structure(build_id, data, exclusive)
  if type(data) ~= "table" then
    return data
  end
  if getmetatable(data) then
    -- Already a DAG node; use as-is.
    return data
  end

  local has_config = data.Config and true or false
  if data.Config and not config_matches(data.Config, build_id) then
    return empty_leaf
  end

  local copy = {}
  for key, value in pairs(data) do
    if type(key) == "number" then
      -- Filter array elements recursively.
      copy[#copy + 1] = filter_structure(build_id, value, has_config)
    elseif key ~= "Config" then
      -- Copy key-value data through (dropping the Config filter itself).
      copy[key] = value
    end
  end
  return copy
end
-- Processes an "Env" table. For each value, the corresponding variable in
-- 'env' is appended to if its "Config" filter matches 'build_id'. If
-- 'build_id' is nil, filtered values are skipped.
function append_filtered_env_vars(env, values_to_append, build_id, exclusive)
  for key, value in util.pairs(values_to_append) do
    if type(value) ~= "table" then
      -- Plain scalar: only applied when not restricted to filtered values.
      if not (exclusive and build_id) then
        env:append(key, value)
      end
    else
      -- List value: flatten with config filtering, then append each item.
      for _, item in ipairs(flatten_list(build_id, value, exclusive)) do
        env:append(key, item)
      end
    end
  end
end
-- Like append_filtered_env_vars(), but replaces existing variables instead
-- of appending to them.
function replace_filtered_env_vars(env, values_to_replace, build_id, exclusive)
  for key, value in util.pairs(values_to_replace) do
    if type(value) == "table" then
      -- List value: flatten with config filtering; only replace when
      -- anything survived the filter.
      local flat = flatten_list(build_id, value, exclusive)
      if next(flat) ~= nil then
        env:replace(key, flat)
      end
    elseif not (exclusive and build_id) then
      -- Plain scalar: only applied when not restricted to filtered values.
      env:replace(key, value)
    end
  end
end
-- Entry point for IDE project generation. Builds a generator state from
-- 'env' and the raw dag nodes, then hands everything to the backend
-- registered via set_ide_backend().
function generate_ide_files(config_tuples, default_names, raw_nodes, env, hints, ide_script)
  local state = new_generator { default_env = env }
  assert(state.default_env)
  create_unit_map(state, raw_nodes)
  -- A backend must have been registered before generation is requested.
  local backend = assert(ide_backend)
  backend(state, config_tuples, raw_nodes, env, default_names, hints, ide_script)
end
-- Register the backend callback later invoked by generate_ide_files().
function set_ide_backend(fn)
  ide_backend = fn
end
-- Expose the DefRule helper which is used to register builder syntax in a
-- simplified way.
--
-- ruledef fields:
--   Name            (required) keyword the rule is registered under
--   Setup           (required) function(env, data) -> table with InputFiles,
--                   OutputFiles and optionally ImplicitInputs/Scanner
--   Command         (required) action string for the generated dag node
--   Blueprint       (required) declaration blueprint used for validation
--   Annotation      (optional) build log label; defaults to "<Name> $(<)"
--   Preprocess      (optional) hook to massage raw declaration data
--   Pass            (optional) default build pass for nodes of this rule
--   ImplicitInputs  (optional) extra implicit input files
--   ConfigInvariant (optional) when true, invocations with identical
--                   input/output/implicit-input sets share a single cached
--                   dag node across configurations
function _G.DefRule(ruledef)
  local name = assert(ruledef.Name, "Missing Name string in DefRule")
  local setup_fn = assert(ruledef.Setup, "Missing Setup function in DefRule " .. name)
  local cmd = assert(ruledef.Command, "Missing Command string in DefRule " .. name)
  local blueprint = assert(ruledef.Blueprint, "Missing Blueprint in DefRule " .. name)

  local mt = create_eval_subclass {}

  local annot = ruledef.Annotation
  if not annot then
    annot = name .. " $(<)"
  end

  local preproc = ruledef.Preprocess

  -- Raise a descriptive error unless 'v' (a value Setup returned) is a table.
  local function verify_table(v, tag)
    if not v then
      errorf("No %s returned from DefRule %s", tag, name)
    end
    if type(v) ~= "table" then
      errorf("%s returned from DefRule %s is not a table", tag, name)
    end
  end

  -- Build the dag node for one invocation of this rule.
  local function make_node(input_files, output_files, env, data, deps, scanner)
    return depgraph.make_node {
      Env = env,
      Label = annot,
      Action = cmd,
      Pass = data.Pass or resolve_pass(ruledef.Pass),
      InputFiles = input_files,
      OutputFiles = output_files,
      ImplicitInputs = ruledef.ImplicitInputs,
      Scanner = scanner,
      Dependencies = deps,
    }
  end

  if ruledef.ConfigInvariant then
    local cache = {}
    function mt:create_dag(env, data, deps)
      local setup_data = setup_fn(env, data)
      local input_files = setup_data.InputFiles
      local output_files = setup_data.OutputFiles
      verify_table(input_files, "InputFiles")
      verify_table(output_files, "OutputFiles")
      -- Digest inputs, outputs and implicit inputs into a cache key;
      -- "@@" markers separate the three sections.
      local mashup = { }
      for _, input in util.nil_ipairs(input_files) do
        mashup[#mashup + 1] = input
      end
      mashup[#mashup + 1] = "@@"
      for _, output in util.nil_ipairs(output_files) do
        mashup[#mashup + 1] = output
      end
      mashup[#mashup + 1] = "@@"
      for _, implicit_input in util.nil_ipairs(setup_data.ImplicitInputs) do
        mashup[#mashup + 1] = implicit_input
      end
      -- Folded into one declaration; the original declared 'local key'
      -- twice, the second shadowing the first.
      local key = util.tostring(native.digest_guid(table.concat(mashup, ';')))
      if cache[key] then
        return cache[key]
      else
        local node = make_node(input_files, output_files, env, data, deps, setup_data.Scanner)
        cache[key] = node
        return node
      end
    end
  else
    function mt:create_dag(env, data, deps)
      local setup_data = setup_fn(env, data)
      verify_table(setup_data.InputFiles, "InputFiles")
      verify_table(setup_data.OutputFiles, "OutputFiles")
      return make_node(setup_data.InputFiles, setup_data.OutputFiles, env, data, deps, setup_data.Scanner)
    end
  end

  if preproc then
    function mt:preprocess_data(raw_data)
      return preproc(raw_data)
    end
  end

  add_evaluator(name, mt, blueprint)
end
-- Default preprocessing hook for evaluators: returns the declaration
-- unchanged. Evaluator subclasses (e.g. DefRule rules with a Preprocess
-- function) override this.
function _nodegen:preprocess_data(data)
  return data
end

View File

@ -0,0 +1,50 @@
module(..., package.seeall)

local npath = require "tundra.native.path"

-- Re-export the native path primitives under this module's namespace so
-- build scripts can use them without touching tundra.native.path directly.
split = npath.split
normalize = npath.normalize
join = npath.join
get_filename_dir = npath.get_filename_dir
get_filename = npath.get_filename
get_extension = npath.get_extension
drop_suffix = npath.drop_suffix
get_filename_base = npath.get_filename_base
is_absolute = npath.is_absolute
-- Strip 'prefix' from the front of 'fn'. Returns 'fn' unchanged when it
-- does not start with the prefix. The prefix is compared literally (no
-- Lua pattern magic applies).
function remove_prefix(prefix, fn)
  if fn:sub(1, #prefix) == prefix then
    return fn:sub(#prefix + 1)
  end
  return fn
end
-- Map a source filename to an object filename beneath $(OBJECTDIR).
-- 'suffix' is the object extension to append (e.g. ".o"); 'env' is
-- accepted for interface compatibility but not consulted here.
function make_object_filename(env, src_fn, suffix)
  -- Source extension without its leading dot; used below to salt the
  -- generated object name so multiple source files with the same base
  -- name don't end up clobbering each other (Tundra emits an error for
  -- this when checking the DAG).
  local src_suffix = get_extension(src_fn):sub(2)

  -- Drop a leading $(OBJECTDIR)/ or $(OBJECTDIR)\ from the input name.
  local object_fn = src_fn:match("^%$%(OBJECTDIR%)[/\\](.*)$") or src_fn

  -- Replace ".." with "dotdot" to avoid creating files outside the
  -- object directory.
  local relative_name = drop_suffix(object_fn:gsub("%.%.", "dotdot"))
  return "$(OBJECTDIR)/$(UNIT_PREFIX)/" .. relative_name .. "__" .. src_suffix .. suffix
end

View File

@ -0,0 +1,7 @@
module(..., package.seeall)

local native = require "tundra.native"

-- Returns the host platform identifier as exposed by the native layer
-- (the native.host_platform field).
function host_platform()
  return native.host_platform
end

View File

@ -0,0 +1,57 @@
module(..., package.seeall)

local util = require "tundra.util"
local native = require "tundra.native"

-- Shared metatable attached to scanner records.
-- NOTE(review): it is set up as its own __index fallback, but no methods
-- are ever added to it in this file, so it currently acts only as a type
-- tag -- confirm against native-side usage.
local _scanner_mt = {}
setmetatable(_scanner_mt, { __index = _scanner_mt })

-- Memoization caches: cpp scanners are keyed by their joined path list,
-- generic scanners by a digest of their parameters.
local cpp_scanner_cache = {}
local generic_scanner_cache = {}
-- Return a memoized C/C++ include scanner for the given include path list.
-- Identical path lists share one scanner record.
function make_cpp_scanner(paths)
  local key = table.concat(paths, '\0')
  if not cpp_scanner_cache[key] then
    -- Assign a unique, zero-based Index to each distinct scanner. The
    -- cache is keyed by strings, so the original '#cpp_scanner_cache'
    -- always evaluated to 0 (every scanner got Index 0); count the
    -- entries instead.
    local count = 0
    for _ in pairs(cpp_scanner_cache) do
      count = count + 1
    end
    local data = { Kind = 'cpp', Paths = paths, Index = count }
    cpp_scanner_cache[key] = setmetatable(data, _scanner_mt)
  end
  return cpp_scanner_cache[key]
end
-- Return a memoized generic keyword scanner. 'data' is mutated in place
-- (Kind is set) and may itself become the cached record.
function make_generic_scanner(data)
  data.Kind = 'generic'
  -- Build a flat fingerprint of the parameters that define the scanner.
  -- NOTE(review): Paths/Keywords/KeywordsNoFollow are concatenated into
  -- one run with no separator between the lists, so different splits of
  -- the same strings could collide -- confirm this is acceptable.
  local mashup = { }
  local function add_all(l)
    for _, value in util.nil_ipairs(l) do
      mashup[#mashup + 1] = value
    end
  end
  add_all(data.Paths)
  add_all(data.Keywords)
  add_all(data.KeywordsNoFollow)
  -- '!!' separates the list section from the boolean flags below.
  mashup[#mashup + 1] = '!!'
  mashup[#mashup + 1] = data.RequireWhitespace and 'y' or 'n'
  mashup[#mashup + 1] = data.UseSeparators and 'y' or 'n'
  mashup[#mashup + 1] = data.BareMeansSystem and 'y' or 'n'
  local key_str = table.concat(mashup, '\001')
  local key = native.digest_guid(key_str)
  local value = generic_scanner_cache[key]
  if not value then
    -- NOTE(review): unlike make_cpp_scanner, this neither assigns an
    -- Index field nor attaches _scanner_mt to the record; all_scanners()
    -- reads v.Index, which would be nil here -- confirm whether generic
    -- scanners are expected to flow through all_scanners().
    value = data
    generic_scanner_cache[key] = data
  end
  return value
end
-- Return a flat list of every scanner created so far (both C/C++ and
-- generic).
--
-- The original placed each scanner at slot v.Index + 1, which raised a
-- 'nil + 1' error for generic scanners (make_generic_scanner never sets
-- Index) and clobbered cpp scanners whose Index values collide. Append
-- sequentially instead so every cached scanner is returned.
-- NOTE(review): iteration order over the caches is unspecified; confirm
-- no consumer depends on a particular ordering of the result.
function all_scanners()
  local scanners = {}
  for _, v in pairs(cpp_scanner_cache) do
    scanners[#scanners + 1] = v
  end
  for _, v in pairs(generic_scanner_cache) do
    scanners[#scanners + 1] = v
  end
  return scanners
end

Some files were not shown because too many files have changed in this diff Show More