Compare commits
11 Commits
f1243084c7
...
324735710b
| Author | SHA1 | Date |
|---|---|---|
| | 324735710b | |
| | 439747daf0 | |
| | 552ef6ce3b | |
| | 07d92aed97 | |
| | 6c243439a9 | |
| | a42c981fa0 | |
| | 8a6f84238e | |
| | bb7187b0c7 | |
| | 3c7edc67da | |
| | 8d8b3607fc | |
| | a78fe0541a | |
debug.exs (new file, 514 lines)
@@ -0,0 +1,514 @@
defmodule Tdd.Debug.TracerData do
  @moduledoc """
  Provides functions to sanitize Elixir terms for tracing and JSON serialization.
  """

  @max_sanitize_depth 5

  def sanitize_values(list) when is_list(list) do
    Enum.map(list, &sanitize_value/1)
  end

  def sanitize_value(val) do
    do_sanitize(val, @max_sanitize_depth)
  end

  defp do_sanitize(val, _depth_limit) when is_integer(val) or is_float(val) or is_atom(val) or is_boolean(val), do: val
  defp do_sanitize(val, _depth_limit) when is_binary(val), do: val # Assume strings are fine
  defp do_sanitize(nil, _depth_limit), do: nil

  defp do_sanitize(val, _depth_limit) when is_pid(val), do: inspect(val)
  defp do_sanitize(val, _depth_limit) when is_port(val), do: inspect(val)
  defp do_sanitize(val, _depth_limit) when is_reference(val), do: inspect(val)

  defp do_sanitize(val, _depth_limit) when is_function(val) do
    try do
      arity = Function.info(val, :arity) |> elem(1)
      case Function.info(val, :name) do
        {:name, name} when is_atom(name) and name != :"" -> "<Function #{Atom.to_string(name)}/#{arity}>"
        _ -> "<Function/#{arity}>"
      end
    catch
      _, _ -> "<Function>"
    end
  end

  defp do_sanitize(val, depth_limit) when is_list(val) do
    if depth_limit <= 0 do
      "[...trimmed_list...]"
    else
      Enum.map(val, &do_sanitize(&1, depth_limit - 1))
    end
  end

  defp do_sanitize(val, depth_limit) when is_map(val) do
    if depth_limit <= 0 do
      "%{...trimmed_map...}"
    else
      if Map.has_key?(val, :__struct__) do
        module = val.__struct__
        # Sanitize only exposed fields, not internal :__meta__ or the :__struct__ key itself
        data_to_sanitize = val |> Map.delete(:__struct__) |> Map.delete(:__meta__)

        sanitized_fields =
          data_to_sanitize
          |> Enum.map(fn {k, v} -> {k, do_sanitize(v, depth_limit - 1)} end)
          |> Enum.into(%{})
        %{type: :struct, module: module, fields: sanitized_fields}
      else # Regular map
        val
        |> Enum.map(fn {k, v} -> {do_sanitize(k, depth_limit - 1), do_sanitize(v, depth_limit - 1)} end)
        |> Enum.into(%{})
      end
    end
  end

  defp do_sanitize(val, depth_limit) when is_tuple(val) do
    if depth_limit <= 0 do
      "{...trimmed_tuple...}"
    else
      val |> Tuple.to_list() |> Enum.map(&do_sanitize(&1, depth_limit - 1)) |> List.to_tuple()
    end
  end

  # Fallback for other types (e.g., DateTime, Date, Time, NaiveDateTime)
  defp do_sanitize(val, _depth_limit) do
    # For common structs that have `to_string` or are Calendar types
    cond do
      is_struct(val, DateTime) or is_struct(val, Date) or is_struct(val, Time) or is_struct(val, NaiveDateTime) ->
        inspect(val) # Jason can handle these if they are ISO8601 strings
      true ->
        # Generic struct or unknown type
        if is_struct(val) do
          "Struct<#{inspect(val.__struct__)}>" # A simpler representation
        else
          inspect(val) # Fallback, converts to string
        end
    end
  end

  def sanitize_error(exception_instance, stacktrace) do
    %{
      type: :error,
      class: Atom.to_string(exception_instance.__struct__),
      message: Exception.message(exception_instance),
      # Sanitize stacktrace to keep it manageable
      stacktrace: Enum.map(stacktrace, &Exception.format_stacktrace_entry/1) |> Enum.take(15)
    }
  end
end

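# A few illustrative results of sanitize_value/1, derived from the clauses above
# (the concrete pid and struct values are made up for illustration, not taken from the commit):
#
#   sanitize_value(self())                        #=> "#PID<0.212.0>"
#   sanitize_value(%URI{host: "example.com"})     #=> %{type: :struct, module: URI, fields: %{host: "example.com", ...}}
#   sanitize_value([1, [2, [3, [4, [5, [6]]]]]])  #=> the sixth level of nesting collapses to "[...trimmed_list...]"
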
defmodule Tdd.Debug do
  @moduledoc """
  Provides macros to wrap `def` and `defp` for function call/return tracing.
  Logs arguments and return values using `IO.inspect`.
  Builds an in-memory call tree data structure per traced process.
  """

  # --- Agent for Tracing State ---
  @agent_name Tdd.Debug.StateAgent

  def init_agent_if_needed do
    case Process.whereis(@agent_name) do
      nil -> Agent.start_link(fn -> MapSet.new() end, name: @agent_name)
      _pid -> :ok
    end
    :ok
  end

  def add_traced_pid(pid) when is_pid(pid) do
    init_agent_if_needed()
    Agent.update(@agent_name, &MapSet.put(&1, pid))
  end

  def remove_traced_pid(pid) when is_pid(pid) do
    case Process.whereis(@agent_name) do
      nil -> :ok
      agent_pid -> Agent.cast(agent_pid, fn state -> MapSet.delete(state, pid) end)
    end
  end

  def is_pid_traced?(pid) when is_pid(pid) do
    case Process.whereis(@agent_name) do
      nil ->
        false
      agent_pid ->
        try do
          Agent.get(agent_pid, &MapSet.member?(&1, pid), :infinity)
        rescue
          _e in [Exit, ArgumentError] -> false # Agent might have died
        end
    end
  end

  # --- Tracing Data Storage ---
  @doc """
  Initializes the tracing data structures in the process dictionary.
  """
  def init_trace_storage do
    Process.put(:tdd_debug_call_stack, [])
    Process.put(:tdd_debug_session_roots, [])
    :ok
  end

  @doc """
  Retrieves the collected trace data (a list of root call tree nodes)
  for the current process and clears it from the process dictionary.
  Children within each node and the root list itself are reversed to be in chronological order.
  """
  def get_and_clear_trace_data do
    data = Process.get(:tdd_debug_session_roots, [])
    # Reverse children recursively and then reverse the list of roots
    processed_data = Enum.map(data, &reverse_children_recursively/1) |> Enum.reverse()

    Process.delete(:tdd_debug_session_roots)
    Process.delete(:tdd_debug_call_stack)
    processed_data
  end

  defp reverse_children_recursively(node) do
    if !node.children or node.children == [] do
      node
    else
      reversed_and_processed_children =
        Enum.map(node.children, &reverse_children_recursively/1)
        |> Enum.reverse() # Children were added prepending, so reverse for chronological
      %{node | children: reversed_and_processed_children}
    end
  end

  # --- Tracing Control Functions ---
  @doc "Enables function call tracing for the current process."
  def enable_tracing do
    pid_to_trace = self()
    add_traced_pid(pid_to_trace)
    init_trace_storage() # Initialize storage for call trees

    ref = Process.monitor(pid_to_trace)
    Process.spawn(fn ->
      receive do
        {:DOWN, ^ref, :process, ^pid_to_trace, _reason} ->
          remove_traced_pid(pid_to_trace)
      after
        3_600_000 -> # 1 hour safety timeout
          remove_traced_pid(pid_to_trace)
      end
    end, []) # Changed from [:monitor] to [] as monitoring is already set up via Process.monitor/1 above
    :ok
  end

  @doc "Disables function call tracing for the current process."
  def disable_tracing do
    remove_traced_pid(self())
    # Note: Does not clear trace data, get_and_clear_trace_data() does that.
    :ok
  end

  @doc """
  Runs the given 0-arity function with tracing enabled.
  Returns a tuple: `{{:ok, result} | {:error, {exception, stacktrace}}, trace_data}`.
  Trace data is a list of call tree root nodes.
  """
  def run(fun) when is_function(fun, 0) do
    enable_tracing()
    outcome =
      try do
        {:ok, fun.()}
      rescue
        kind -> {:error, {kind, __STACKTRACE__}}
      end
    trace_data = get_and_clear_trace_data()
    disable_tracing()
    # trace_data
    # |> IO.inspect()
    binary = JSON.encode!(trace_data)
    File.write("trace.json", binary)
    {outcome, trace_data}
  end

  # --- Process Dictionary for Call Depth (used for printing indent) ---
  defp get_depth, do: Process.get(:tdd_debug_depth, 0)
  def increment_depth do
    new_depth = get_depth() + 1
    Process.put(:tdd_debug_depth, new_depth)
    new_depth
  end
  def decrement_depth do
    new_depth = max(0, get_depth() - 1)
    Process.put(:tdd_debug_depth, new_depth)
    new_depth
  end

  # --- Core Macro Logic ---
  defmacro __using__(_opts) do
    quote do
      import Kernel, except: [def: 1, def: 2, defp: 1, defp: 2]
      import Tdd.Debug # Import this module's public functions/macros
      require Tdd.Debug # Ensure this module is compiled for macros
    end
  end

  @doc false
  defmacro def(call, clauses \\ Keyword.new()) do
    generate_traced_function(:def, call, clauses, __CALLER__)
  end

  @doc false
  defmacro defp(call, clauses \\ Keyword.new()) do
    generate_traced_function(:defp, call, clauses, __CALLER__)
  end

  defp is_simple_variable_ast?(ast_node) do
    case ast_node do
      {var_name, _meta, context_module} when is_atom(var_name) and (context_module == nil or is_atom(context_module)) ->
        var_name != :_ # Ensure it's not the underscore atom itself
      _ ->
        false
    end
  end

  defp generate_traced_function(type, call_ast, clauses, caller_env) do
    require Macro

    {function_name_ast, meta_call, original_args_patterns_ast_nullable} = call_ast
    original_args_patterns_ast_list = original_args_patterns_ast_nullable || []

    original_body_ast =
      (if Keyword.keyword?(clauses) do
        Keyword.get(clauses, :do, clauses)
      else
        clauses
      end) || quote(do: nil)

    # --- Argument Handling for Logging (Existing Logic) ---
    mapped_and_generated_vars_tuples =
      Enum.map(Enum.with_index(original_args_patterns_ast_list), fn {original_pattern_ast, index} ->
        td_arg_var = Macro.var(String.to_atom("__td_arg_#{index}__"), nil)

        {final_pattern_for_head, rhs_for_td_arg_assignment} =
          case original_pattern_ast do
            {:_, _, _} = underscore_ast -> {underscore_ast, quote(do: :__td_ignored_argument__)}

            {:=, _meta_assign, [lhs_of_assign, _rhs_of_assign]} = assignment_pattern_ast ->
              if is_simple_variable_ast?(lhs_of_assign) do
                {assignment_pattern_ast, lhs_of_assign}
              else
                captured_val_var = Macro.unique_var(String.to_atom("tdc_assign_#{index}"), Elixir)
                new_head_pattern = quote do unquote(captured_val_var) = unquote(assignment_pattern_ast) end
                {new_head_pattern, captured_val_var}
              end

            {:\\, _meta_default, [pattern_before_default, _default_value_ast]} = default_arg_pattern_ast ->
              cond do
                is_simple_variable_ast?(pattern_before_default) ->
                  {default_arg_pattern_ast, pattern_before_default}

                match?({:=, _, [lhs_inner_assign, _]}, pattern_before_default) and is_simple_variable_ast?(pattern_before_default |> elem(2) |> Enum.at(0)) ->
                  {:=, _, [lhs_inner_assign, _]} = pattern_before_default
                  {default_arg_pattern_ast, lhs_inner_assign}

                true ->
                  captured_val_var = Macro.unique_var(String.to_atom("tdc_def_#{index}"), Elixir)
                  new_head_pattern = quote do unquote(captured_val_var) = unquote(default_arg_pattern_ast) end
                  {new_head_pattern, captured_val_var}
              end

            ast_node ->
              if is_simple_variable_ast?(ast_node) do
                {ast_node, ast_node}
              else
                captured_val_var = Macro.unique_var(String.to_atom("tdc_pat_#{index}"), Elixir)
                new_head_pattern = quote do unquote(captured_val_var) = unquote(ast_node) end
                {new_head_pattern, captured_val_var}
              end
          end

        assignment_ast = quote do unquote(td_arg_var) = unquote(rhs_for_td_arg_assignment) end
        {final_pattern_for_head, assignment_ast, td_arg_var}
      end)

    {new_args_patterns_for_head_list, assignments_for_logging_vars_ast_list, generated_vars_to_log_asts} =
      if mapped_and_generated_vars_tuples == [] do
        {[], [], []}
      else
        # Transpose list of 3-element tuples into 3 lists
        list_of_lists = Enum.map(mapped_and_generated_vars_tuples, &Tuple.to_list(&1))

        [
          Enum.map(list_of_lists, &List.first(&1)),
          Enum.map(list_of_lists, &Enum.at(&1, 1)),
          Enum.map(list_of_lists, &Enum.at(&1, 2))
        ]
        |> then(fn [a, b, c] -> {a, b, c} end)
      end

    new_call_ast = {function_name_ast, meta_call, new_args_patterns_for_head_list}

    # --- Variables for Runtime Info ---
    # These vars will hold string versions of module/function name and arity at runtime.
    # Hygienic vars to avoid collisions.
    module_name_runtime_var = Macro.var(:__td_module_name__, __MODULE__)
    printable_fn_name_runtime_var = Macro.var(:__td_printable_fn_name__, __MODULE__)
    arity_runtime_var = Macro.var(:__td_arity__, __MODULE__)

    # Arity calculation at macro expansion time
    arity_value = length(original_args_patterns_ast_list)

    traced_body_inner_ast =
      quote do
        # --- Resolve Module/Function/Arity Info at Runtime ---
        unquote(module_name_runtime_var) = Atom.to_string(unquote(caller_env.module))
        unquote(arity_runtime_var) = unquote(arity_value)

        runtime_resolved_fn_name_val = unquote(function_name_ast) # Resolve var if func name is from a var
        unquote(printable_fn_name_runtime_var) =
          if is_atom(runtime_resolved_fn_name_val) do
            Atom.to_string(runtime_resolved_fn_name_val)
          else
            Macro.to_string(runtime_resolved_fn_name_val) # For unquote(var) or operators
          end

        # --- Main Tracing Logic ---
        if Tdd.Debug.is_pid_traced?(self()) do
          # --- On Entry: Prepare Data & Log ---
          unquote_splicing(assignments_for_logging_vars_ast_list) # Assign __td_arg_N__ vars
          runtime_arg_values_for_node_and_log = [unquote_splicing(generated_vars_to_log_asts)]

          # sanitized_args_for_node = Tdd.Debug.TracerData.sanitize_values(runtime_arg_values_for_node_and_log)
          sanitized_args_for_node = inspect(runtime_arg_values_for_node_and_log)
          current_call_print_depth = Tdd.Debug.increment_depth() # For print indent & node depth

          # Create placeholder node, push to call_stack
          new_node_details = %{
            id: System.unique_integer([:positive, :monotonic]),
            # module: unquote(module_name_runtime_var),
            # function: unquote(printable_fn_name_runtime_var),
            function: "#{unquote(module_name_runtime_var)}.#{unquote(printable_fn_name_runtime_var)}",
            # arity: unquote(arity_runtime_var),
            args: sanitized_args_for_node,
            depth: current_call_print_depth,
            # timestamp_enter_monotonic: System.monotonic_time(),
            children: [], # Will be populated in reverse chronological order
            result: :__td_not_returned_yet__ # Placeholder
          }
          Process.put(:tdd_debug_call_stack, [new_node_details | Process.get(:tdd_debug_call_stack, [])])

          # Logging Call (existing logic)
          indent = String.duplicate(" ", current_call_print_depth - 1)
          IO.puts("#{indent}CALL: #{unquote(module_name_runtime_var)}.#{unquote(printable_fn_name_runtime_var)}")
          IO.puts("#{indent} ARGS: #{inspect(runtime_arg_values_for_node_and_log)}")

          try do
            # --- Execute Original Body ---
            result_val = unquote(original_body_ast)

            # --- On Normal Exit: Finalize Node & Log ---
            [completed_node_placeholder | parent_stack_nodes] = Process.get(:tdd_debug_call_stack)
            ts_exit_monotonic = System.monotonic_time()
            # duration_ms = System.convert_time_unit(ts_exit_monotonic - completed_node_placeholder.timestamp_enter_monotonic, :native, :millisecond)
            sanitized_result_val = Tdd.Debug.TracerData.sanitize_value(result_val)

            finalized_node =
              completed_node_placeholder
              # |> Map.put(:timestamp_exit_monotonic, ts_exit_monotonic)
              # |> Map.put(:duration_ms, duration_ms)
              |> Map.put(:result, %{type: :ok, value: sanitized_result_val})

            _ = Tdd.Debug.decrement_depth() # For print indent

            # Update call stack: Add finalized_node to parent's children or session_roots
            new_call_stack_after_success =
              case parent_stack_nodes do
                [parent_on_stack | grand_parent_stack_nodes] ->
                  updated_parent = %{parent_on_stack | children: [finalized_node | parent_on_stack.children]}
                  [updated_parent | grand_parent_stack_nodes]
                [] -> # This was a root call
                  Process.put(:tdd_debug_session_roots, [finalized_node | Process.get(:tdd_debug_session_roots, [])])
                  [] # New call_stack is empty for this branch
              end
            Process.put(:tdd_debug_call_stack, new_call_stack_after_success)

            # Logging Return
            IO.puts("#{indent}RETURN from #{unquote(module_name_runtime_var)}.#{unquote(printable_fn_name_runtime_var)}: #{inspect(result_val)}")
            result_val # Return actual result
          rescue
            exception_class_err ->
              # --- On Error Exit: Finalize Node & Log ---
              error_instance_err = exception_class_err
              stacktrace_err = __STACKTRACE__

              [erroring_node_placeholder | parent_stack_nodes_err] = Process.get(:tdd_debug_call_stack)
              ts_exit_monotonic_err = System.monotonic_time()
              # :timestamp_enter_monotonic is not stored on the node above, so the duration
              # calculation is commented out here as well, mirroring the success branch.
              # duration_ms_err = System.convert_time_unit(ts_exit_monotonic_err - erroring_node_placeholder.timestamp_enter_monotonic, :native, :millisecond)
              sanitized_error_info = Tdd.Debug.TracerData.sanitize_error(error_instance_err, stacktrace_err)

              finalized_error_node =
                erroring_node_placeholder
                # |> Map.put(:timestamp_exit_monotonic, ts_exit_monotonic_err)
                # |> Map.put(:duration_ms, duration_ms_err)
                |> Map.put(:result, sanitized_error_info)

              _ = Tdd.Debug.decrement_depth() # For print indent

              # Update call stack: Add finalized_error_node to parent's children or session_roots
              new_call_stack_after_error =
                case parent_stack_nodes_err do
                  [parent_on_stack_err | grand_parent_stack_nodes_err] ->
                    updated_parent_err = %{parent_on_stack_err | children: [finalized_error_node | parent_on_stack_err.children]}
                    [updated_parent_err | grand_parent_stack_nodes_err]
                  [] -> # This was a root call that errored
                    Process.put(:tdd_debug_session_roots, [finalized_error_node | Process.get(:tdd_debug_session_roots, [])])
                    [] # New call_stack is empty for this branch
                end
              Process.put(:tdd_debug_call_stack, new_call_stack_after_error)

              # Logging Error
              IO.puts("#{indent}ERROR in #{unquote(module_name_runtime_var)}.#{unquote(printable_fn_name_runtime_var)}: #{inspect(error_instance_err)}")
              reraise error_instance_err, stacktrace_err # Reraise the original error
          end
        else
          # --- Not Traced: Execute Original Body Directly ---
          unquote(original_body_ast)
        end
      end

    # --- Final Definition ---
    final_definition_ast =
      quote location: :keep do
        Kernel.unquote(type)(
          unquote(new_call_ast),
          do: unquote(traced_body_inner_ast)
        )
      end

    final_definition_ast
  end

  # --- TDD Graph Printing (Kept as it was, assuming it's for a different purpose) ---
  @doc "Prints a formatted representation of a TDD graph structure."
  def print_tdd_graph(id, store_module \\ Tdd.Store) do
    IO.puts("--- TDD Graph (ID: #{id}) ---")
    do_print_tdd_node(id, 0, MapSet.new(), store_module)
    IO.puts("------------------------")
  end

  defp do_print_tdd_node(id, indent_level, visited, store_module) do
    prefix = String.duplicate(" ", indent_level)

    if MapSet.member?(visited, id) do
      IO.puts("#{prefix}ID #{id} -> [Seen, recursive link]")
      :ok
    else
      new_visited = MapSet.put(visited, id)
      case store_module.get_node(id) do # Assumes store_module.get_node/1 exists
        {:ok, :true_terminal} -> IO.puts("#{prefix}ID #{id} -> TRUE")
        {:ok, :false_terminal} -> IO.puts("#{prefix}ID #{id} -> FALSE")
        {:ok, {var, y_id, n_id, dc_id}} ->
          IO.puts("#{prefix}ID #{id}: IF #{inspect(var)}")
          IO.puts("#{prefix} ├─ Yes (to ID #{y_id}):")
          do_print_tdd_node(y_id, indent_level + 2, new_visited, store_module)
          IO.puts("#{prefix} ├─ No (to ID #{n_id}):")
          do_print_tdd_node(n_id, indent_level + 2, new_visited, store_module)
          IO.puts("#{prefix} └─ DC (to ID #{dc_id}):")
          do_print_tdd_node(dc_id, indent_level + 2, new_visited, store_module)
        {:error, reason} ->
          IO.puts("#{prefix}ID #{id}: ERROR - #{reason}")
      end
    end
  end
end
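
Taken together, `Tdd.Debug.run/1` is the entry point the rest of debug.exs builds towards: it enables tracing for the current process, runs the given function, captures the call tree, and dumps it to `trace.json`. A minimal usage sketch follows; `MyCompiler` and its `compile/1` function are placeholders for any module that already does `use Tdd.Debug`, and the node layout mirrors `new_node_details` above.

```elixir
# Hypothetical driver; MyCompiler is not part of this commit.
{outcome, trace_data} =
  Tdd.Debug.run(fn ->
    MyCompiler.compile("x = 1 + 2")
  end)

# outcome    -> {:ok, result} or {:error, {exception, stacktrace}}
# trace_data -> list of root call-tree nodes, each shaped roughly like:
# %{
#   id: 42,
#   function: "Elixir.MyCompiler.compile",
#   args: "[\"x = 1 + 2\"]",
#   depth: 1,
#   children: [...nested nodes, in chronological order after reversal...],
#   result: %{type: :ok, value: ...}
# }
# run/1 also writes the same tree to "trace.json" via JSON.encode!/1.
```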
debug.md (10 lines changed)
@@ -2,7 +2,9 @@ i have a compiler Im writing in elixir. I need to trace execution of logic to p
 I have many tests I want to debug individually but I can run only the full test suite.
 I want to create a couple of macros/functions that'd enable me to debug my code.
 the scenario I imagine:
-before the test I want to debug I write `Tdd.Debug.enable`
-And after the test line I add `Tdd.Debug.print_and_disable`.
-The last line prints a tree of called functions, their arguments and return values.
-We can modify compiler functions.
+before the test I want to debug I write `Tdd.Debug.enable` and then :
+as the code is being executed, it prints out a tree of called functions, their arguments and return values, effectively building a stacktrace as its being executed.
+first lets design how we'd print this information and if/what should we store in memory to be able to render it in a shape sufficient for good debugging.
+Then lets design an elixir module that compiler modules would 'use'
+e.g. a Tdd.Debug module that'd have and override for elixir `def`.
+
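
The workflow sketched in debug.md maps onto the API that debug.exs and scratch.exs actually expose (`enable_tracing/0` and `disable_tracing/0` rather than `enable` and `print_and_disable`). A rough ExUnit sketch of the intended use; the test module and `MyCompiler.compile/1` are illustrative and not part of this change:

```elixir
defmodule MyCompilerTest do
  use ExUnit.Case

  test "traces one compiler run" do
    # closest implemented counterpart of the `Tdd.Debug.enable` from the note
    Tdd.Debug.enable_tracing()

    # CALL / ARGS / RETURN lines are printed while this runs,
    # provided MyCompiler does `use Tdd.Debug`
    assert {:ok, _ir} = MyCompiler.compile("let x = 1")

    # stands in for `Tdd.Debug.print_and_disable`; the collected call tree
    # can be fetched separately with get_and_clear_trace_data/0
    Tdd.Debug.disable_tracing()
  end
end
```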
scratch.exs (new file, 308 lines)
@@ -0,0 +1,308 @@
defmodule Tdd.Debug do
  @moduledoc """
  Provides macros to wrap `def` and `defp` for simple function call/return tracing.
  Logs arguments as a list and return values using `IO.inspect`.
  """

  # --- Agent for Tracing State ---
  @agent_name Tdd.Debug.StateAgent

  def init_agent_if_needed do
    case Process.whereis(@agent_name) do
      nil -> Agent.start_link(fn -> MapSet.new() end, name: @agent_name)
      _pid -> :ok
    end
    :ok
  end

  def add_traced_pid(pid) when is_pid(pid) do
    init_agent_if_needed()
    Agent.update(@agent_name, &MapSet.put(&1, pid))
  end

  def remove_traced_pid(pid) when is_pid(pid) do
    case Process.whereis(@agent_name) do
      nil -> :ok
      agent_pid -> Agent.cast(agent_pid, fn state -> MapSet.delete(state, pid) end)
    end
  end

  def is_pid_traced?(pid) when is_pid(pid) do
    case Process.whereis(@agent_name) do
      nil ->
        false
      agent_pid ->
        try do
          Agent.get(agent_pid, &MapSet.member?(&1, pid), :infinity)
        rescue
          _e in [Exit, ArgumentError] -> false
        end
    end
  end

  # --- Tracing Control Functions ---
  @doc "Enables function call tracing for the current process."
  def enable_tracing do
    pid_to_trace = self()
    add_traced_pid(pid_to_trace)
    ref = Process.monitor(pid_to_trace)

    Process.spawn(fn ->
      receive do
        {:DOWN, ^ref, :process, ^pid_to_trace, _reason} ->
          remove_traced_pid(pid_to_trace)
      after
        3_600_000 -> # 1 hour safety timeout
          remove_traced_pid(pid_to_trace)
      end
    end, [:monitor])
    :ok
  end

  @doc "Disables function call tracing for the current process."
  def disable_tracing do
    remove_traced_pid(self())
    :ok
  end

  @doc "Runs the given 0-arity function with tracing enabled, then disables it."
  def run(fun) when is_function(fun, 0) do
    enable_tracing()
    try do
      fun.()
    after
      disable_tracing()
    end
  end

  # --- Process Dictionary for Call Depth ---
  defp get_depth, do: Process.get(:tdd_debug_depth, 0)
  def increment_depth do
    new_depth = get_depth() + 1
    Process.put(:tdd_debug_depth, new_depth)
    new_depth
  end
  def decrement_depth do
    new_depth = max(0, get_depth() - 1)
    Process.put(:tdd_debug_depth, new_depth)
    new_depth
  end

  # --- Core Macro Logic ---
  defmacro __using__(_opts) do
    quote do
      import Kernel, except: [def: 1, def: 2, defp: 1, defp: 2]
      import Tdd.Debug
      require Tdd.Debug
    end
  end

  @doc false
  defmacro def(call, clauses \\ Keyword.new()) do
    generate_traced_function(:def, call, clauses, __CALLER__)
  end

  @doc false
  defmacro defp(call, clauses \\ Keyword.new()) do
    generate_traced_function(:defp, call, clauses, __CALLER__)
  end

  defp is_simple_variable_ast?(ast_node) do
    case ast_node do
      {var_name, _meta, _context} when is_atom(var_name) ->
        var_name != :_
      _ -> false
    end
  end

  defp generate_traced_function(type, call_ast, clauses, caller_env) do
    require Macro # Good practice

    {function_name_ast, meta_call, original_args_patterns_ast_nullable} = call_ast
    original_args_patterns_ast_list = original_args_patterns_ast_nullable || []

    original_body_ast =
      (if Keyword.keyword?(clauses) do
        Keyword.get(clauses, :do, clauses)
      else
        clauses
      end) || quote(do: nil)

    mapped_and_generated_vars_tuples =
      Enum.map(Enum.with_index(original_args_patterns_ast_list), fn {original_pattern_ast, index} ->
        # __td_arg_N__ is for logging, make it hygienic with `nil` context (or __MODULE__)
        td_arg_var = Macro.var(String.to_atom("__td_arg_#{index}__"), nil)

        {final_pattern_for_head, rhs_for_td_arg_assignment} =
          case original_pattern_ast do
            # 1. Ignored variable: `_`
            # AST: {:_, meta, context_module_or_nil}
            {:_, _, _} = underscore_ast ->
              {underscore_ast, quote(do: :__td_ignored_argument__)}

            # 2. Assignment pattern: `var = pattern` or `var = _`
            # AST: {:=, meta, [lhs, rhs_of_assign]}
            {:=, _meta_assign, [lhs_of_assign, _rhs_of_assign]} = assignment_pattern_ast ->
              if is_simple_variable_ast?(lhs_of_assign) do
                {assignment_pattern_ast, lhs_of_assign} # Head uses `var = pattern`, log `var`
              else
                # LHS is complex (e.g., `%{key: v} = pattern`), capture the whole value.
                captured_val_var = Macro.unique_var(String.to_atom("tdc_assign_#{index}"), Elixir)
                new_head_pattern = quote do unquote(captured_val_var) = unquote(assignment_pattern_ast) end
                {new_head_pattern, captured_val_var}
              end

            # 3. Default argument: `pattern_before_default \\ default_value`
            # AST: {:\\, meta, [pattern_before_default, default_value_ast]}
            {:\\, _meta_default, [pattern_before_default, _default_value_ast]} = default_arg_pattern_ast ->
              cond do
                # 3a. `var \\ default`
                is_simple_variable_ast?(pattern_before_default) ->
                  {default_arg_pattern_ast, pattern_before_default}

                # 3b. `(var = inner_pattern) \\ default`
                match?({:=, _, [lhs_inner_assign, _]}, pattern_before_default) and
                    is_simple_variable_ast?(pattern_before_default |> elem(2) |> Enum.at(0)) ->
                  {:=, _, [lhs_inner_assign, _]} = pattern_before_default
                  # `lhs_inner_assign` is the var on the left of `=`
                  {default_arg_pattern_ast, lhs_inner_assign}

                # 3c. `(complex_pattern) \\ default` or `(_ = inner_pattern) \\ default` etc.
                true ->
                  captured_val_var = Macro.unique_var(String.to_atom("tdc_def_#{index}"), Elixir)
                  new_head_pattern = quote do unquote(captured_val_var) = unquote(default_arg_pattern_ast) end
                  {new_head_pattern, captured_val_var}
              end

            # 4. Simple variable `var` (checked using our helper)
            # or other complex patterns/literals not caught above.
            ast_node ->
              if is_simple_variable_ast?(ast_node) do
                {ast_node, ast_node} # Head uses `var`, log `var`
              else
                # It's a complex pattern (e.g., `%{a: x}`, `[h | t]`) or a literal not assignable to.
                captured_val_var = Macro.unique_var(String.to_atom("tdc_pat_#{index}"), Elixir)
                new_head_pattern = quote do unquote(captured_val_var) = unquote(ast_node) end
                {new_head_pattern, captured_val_var}
              end
          end

        assignment_ast = quote do unquote(td_arg_var) = unquote(rhs_for_td_arg_assignment) end
        {final_pattern_for_head, assignment_ast, td_arg_var}
      end)

    {new_args_patterns_for_head_list, assignments_for_logging_vars_ast_list, generated_vars_to_log_asts} =
      if mapped_and_generated_vars_tuples == [],
        do: {[], [], []},
        else: mapped_and_generated_vars_tuples |> Enum.map(&Tuple.to_list(&1)) |> Enum.zip() |> Enum.map(&Tuple.to_list(&1))
              |> then(fn [a, b, c] -> {a, b, c} end)

    # Enum.unzip(mapped_and_generated_vars_tuples)

    new_call_ast = {function_name_ast, meta_call, new_args_patterns_for_head_list}

    traced_body_inner_ast =
      quote do
        unquote_splicing(assignments_for_logging_vars_ast_list)

        if Tdd.Debug.is_pid_traced?(self()) do
          current_print_depth = Tdd.Debug.increment_depth()
          indent = String.duplicate(" ", current_print_depth - 1)
          runtime_arg_values = [unquote_splicing(generated_vars_to_log_asts)]

          actual_module_name_str = Atom.to_string(unquote(caller_env.module))

          # The function_name_ast is resolved at macro expansion time.
          # If it's `def foo(...)`, `unquote(function_name_ast)` becomes `:foo`.
          # If `def unquote(name_var)(...)`, it resolves `name_var`.
          resolved_fn_name = unquote(function_name_ast)
          printable_function_name_str =
            if is_atom(resolved_fn_name) do
              Atom.to_string(resolved_fn_name)
            else
              Macro.to_string(resolved_fn_name) # For complex names / operators if AST passed
            end

          IO.puts(
            "#{indent}CALL: #{actual_module_name_str}.#{printable_function_name_str}"
          )
          IO.puts(
            "#{indent} ARGS: #{inspect(runtime_arg_values)}"
          )

          try do
            result = unquote(original_body_ast)
            _ = Tdd.Debug.decrement_depth()
            IO.puts(
              "#{indent}RETURN from #{actual_module_name_str}.#{printable_function_name_str}: #{inspect(result)}"
            )
            result
          rescue
            exception_class ->
              error_instance = exception_class
              stacktrace = __STACKTRACE__
              _ = Tdd.Debug.decrement_depth()
              IO.puts(
                "#{indent}ERROR in #{actual_module_name_str}.#{printable_function_name_str}: #{inspect(error_instance)}"
              )
              reraise error_instance, stacktrace
          end
        else
          unquote(original_body_ast)
        end
      end

    final_definition_ast =
      quote location: :keep do
        Kernel.unquote(type)(
          unquote(new_call_ast),
          do: unquote(traced_body_inner_ast)
        )
      end

    final_definition_ast
  end

  # --- TDD Graph Printing (Kept as it was, not directly related to call tracing simplification) ---
  @doc "Prints a formatted representation of a TDD graph structure."
  def print_tdd_graph(id, store_module \\ Tdd.Store) do
    IO.puts("--- TDD Graph (ID: #{id}) ---")
    do_print_tdd_node(id, 0, MapSet.new(), store_module)
    IO.puts("------------------------")
  end

  defp do_print_tdd_node(id, indent_level, visited, store_module) do
    prefix = String.duplicate(" ", indent_level)

    if MapSet.member?(visited, id) do
      IO.puts("#{prefix}ID #{id} -> [Seen, recursive link]")
      :ok
    else
      new_visited = MapSet.put(visited, id)
      case store_module.get_node(id) do # Assumes store_module.get_node/1 exists
        {:ok, :true_terminal} -> IO.puts("#{prefix}ID #{id} -> TRUE")
        {:ok, :false_terminal} -> IO.puts("#{prefix}ID #{id} -> FALSE")
        {:ok, {var, y_id, n_id, dc_id}} ->
          IO.puts("#{prefix}ID #{id}: IF #{inspect(var)}")
          IO.puts("#{prefix} ├─ Yes (to ID #{y_id}):")
          do_print_tdd_node(y_id, indent_level + 2, new_visited, store_module)
          IO.puts("#{prefix} ├─ No (to ID #{n_id}):")
          do_print_tdd_node(n_id, indent_level + 2, new_visited, store_module)
          IO.puts("#{prefix} └─ DC (to ID #{dc_id}):")
          do_print_tdd_node(dc_id, indent_level + 2, new_visited, store_module)
        {:error, reason} ->
          IO.puts("#{prefix}ID #{id}: ERROR - #{reason}")
      end
    end
  end
end


defmodule Asd do
  use Tdd.Debug

  def kekistan(dupsko, {:sommething, _}) do
    IO.inspect("inside ")
    dupsko + 1
  end
end

Process.sleep(100)
Tdd.Debug.enable_tracing()
# Asd.kekistan(1, 2)
Asd.kekistan(1, {:sommething, 2})
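
For reference, running this scratch script should print output along these lines (exact spacing depends on the `indent` and `ARGS` format strings above; the values follow from `Asd.kekistan(1, {:sommething, 2})` returning `dupsko + 1`):

```
CALL: Elixir.Asd.kekistan
 ARGS: [1, {:sommething, 2}]
"inside "
RETURN from Elixir.Asd.kekistan: 2
```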