changed CHANGELOG.md
 
@@ -1,5 +1,13 @@
1
1
## CHANGELOG
2
2
3
4
### `v1.7.0`
5
6
* Support for "Other" (non-web) Transactions. [#84](https://github.com/newrelic/elixir_agent/pull/84)
7
* Calculate and report Apdex metric. [#87](https://github.com/newrelic/elixir_agent/pull/87)
8
9
-------
10
3
11
### `v1.6.2`
4
12
5
13
* Improve error logging when encountering a bad DT payload
changed VERSION
 
@@ -1 +1 @@
1
- 1.6.2
1
1.7.0
changed hex_metadata.config
 
@@ -22,7 +22,6 @@
22
22
<<"lib/new_relic/error/reporter.ex">>,
23
23
<<"lib/new_relic/error/supervisor.ex">>,<<"lib/new_relic/error/trace.ex">>,
24
24
<<"lib/new_relic/harvest">>,<<"lib/new_relic/harvest/collector">>,
25
- <<"lib/new_relic/harvest/collector/.DS_Store">>,
26
25
<<"lib/new_relic/harvest/collector/agent_run.ex">>,
27
26
<<"lib/new_relic/harvest/collector/connect.ex">>,
28
27
<<"lib/new_relic/harvest/collector/custom_event">>,
 
@@ -59,6 +58,7 @@
59
58
<<"lib/new_relic/tracer.ex">>,<<"lib/new_relic/tracer/macro.ex">>,
60
59
<<"lib/new_relic/tracer/report.ex">>,<<"lib/new_relic/transaction">>,
61
60
<<"lib/new_relic/transaction.ex">>,
61
<<"lib/new_relic/transaction/complete.ex">>,
62
62
<<"lib/new_relic/transaction/error_handler.ex">>,
63
63
<<"lib/new_relic/transaction/event.ex">>,
64
64
<<"lib/new_relic/transaction/monitor.ex">>,
 
@@ -66,8 +66,9 @@
66
66
<<"lib/new_relic/transaction/reporter.ex">>,
67
67
<<"lib/new_relic/transaction/supervisor.ex">>,
68
68
<<"lib/new_relic/transaction/trace.ex">>,<<"lib/new_relic/util">>,
69
- <<"lib/new_relic/util.ex">>,<<"lib/new_relic/util/attr_store.ex">>,
70
- <<"lib/new_relic/util/error.ex">>,<<"lib/new_relic/util/event.ex">>,
69
<<"lib/new_relic/util.ex">>,<<"lib/new_relic/util/apdex.ex">>,
70
<<"lib/new_relic/util/attr_store.ex">>,<<"lib/new_relic/util/error.ex">>,
71
<<"lib/new_relic/util/event.ex">>,
71
72
<<"lib/new_relic/util/priority_queue.ex">>,<<"mix.exs">>,<<"README.md">>,
72
73
<<"CHANGELOG.md">>,<<"VERSION">>]}.
73
74
{<<"licenses">>,[<<"Apache 2.0">>]}.
 
@@ -89,4 +90,4 @@
89
90
{<<"optional">>,false},
90
91
{<<"repository">>,<<"hexpm">>},
91
92
{<<"requirement">>,<<"~> 1.0">>}]]}.
92
- {<<"version">>,<<"1.6.2">>}.
93
{<<"version">>,<<"1.7.0">>}.
changed lib/new_relic.ex
 
@@ -13,7 +13,7 @@ defmodule NewRelic do
13
13
in the Transaction list.
14
14
15
15
```elixir
16
- NewRelic.set_transaction_name("Plug/custom/transaction/name")
16
NewRelic.set_transaction_name("/Plug/custom/transaction/name")
17
17
```
18
18
"""
19
19
defdelegate set_transaction_name(name), to: NewRelic.Transaction.Reporter
 
@@ -30,6 +30,33 @@ defmodule NewRelic do
30
30
@doc false
31
31
defdelegate incr_attributes(attrs), to: NewRelic.Transaction.Reporter
32
32
33
@doc """
34
Start a new "Other" Transaction.
35
36
This will begin monitoring the current process as an "Other" Transaction
37
(ie: Not a "Web" Transaction). The first argument will be considered
38
the "category", the second is the "name".
39
40
Examples:
41
42
```elixir
43
NewRelic.start_transaction("GenStage", "MyConsumer/EventType")
44
NewRelic.start_transaction("Task", "TaskName")
45
```
46
47
**Notes:**
48
49
* Don't use this to track Web Transactions - for that,
50
`use NewRelic.Transaction` in your Plug pipeline so that we can properly
51
categorize as Web Transactions in the UI.
52
* Do _not_ use this for processes that live a very long time, doing so
53
will risk a memory leak tracking attributes in the transaction!
54
* You can't start a new transaction within an existing one. Any process
55
spawned inside a transaction belongs to that transaction.
56
"""
57
@spec start_transaction(String.t(), String.t()) :: none()
58
defdelegate start_transaction(category, name), to: NewRelic.Transaction
59
33
60
@doc """
34
61
Store information about the type of work the current span is doing.
changed lib/new_relic/distributed_trace.ex
 
@@ -4,6 +4,7 @@ defmodule NewRelic.DistributedTrace do
4
4
@moduledoc false
5
5
6
6
alias NewRelic.DistributedTrace.{Context, Tracker}
7
alias NewRelic.Harvest.Collector.AgentRun
7
8
alias NewRelic.Transaction
8
9
9
10
def accept_distributed_trace_payload(:http, conn) do
 
@@ -24,6 +25,86 @@ defmodule NewRelic.DistributedTrace do
24
25
end
25
26
end
26
27
28
def generate_new_context() do
29
{priority, sampled} = generate_sampling()
30
31
%Context{
32
account_id: AgentRun.account_id(),
33
app_id: AgentRun.primary_application_id(),
34
trust_key: AgentRun.trusted_account_key(),
35
priority: priority,
36
sampled: sampled
37
}
38
end
39
40
def track_transaction(context, transport_type: type) do
41
context
42
|> assign_transaction_guid()
43
|> report_attributes(transport_type: type)
44
|> convert_to_outbound()
45
|> set_tracing_context()
46
end
47
48
def report_attributes(
49
%Context{parent_id: nil} = context,
50
transport_type: _type
51
) do
52
[
53
guid: context.guid,
54
traceId: context.guid,
55
priority: context.priority,
56
sampled: context.sampled
57
]
58
|> NewRelic.add_attributes()
59
60
context
61
end
62
63
def report_attributes(context, transport_type: type) do
64
[
65
"parent.type": context.type,
66
"parent.app": context.app_id,
67
"parent.account": context.account_id,
68
"parent.transportType": type,
69
"parent.transportDuration": transport_duration(context.timestamp),
70
parentId: context.parent_id,
71
parentSpanId: context.span_guid,
72
guid: context.guid,
73
traceId: context.trace_id,
74
priority: context.priority,
75
sampled: context.sampled
76
]
77
|> NewRelic.add_attributes()
78
79
context
80
end
81
82
def convert_to_outbound(%Context{parent_id: nil} = context) do
83
%Context{
84
account_id: AgentRun.account_id(),
85
app_id: AgentRun.primary_application_id(),
86
parent_id: nil,
87
trust_key: context.trust_key,
88
guid: context.guid,
89
trace_id: context.guid,
90
priority: context.priority,
91
sampled: context.sampled
92
}
93
end
94
95
def convert_to_outbound(%Context{} = context) do
96
%Context{
97
account_id: AgentRun.account_id(),
98
app_id: AgentRun.primary_application_id(),
99
parent_id: context.guid,
100
trust_key: context.trust_key,
101
guid: context.guid,
102
trace_id: context.trace_id,
103
priority: context.priority,
104
sampled: context.sampled
105
}
106
end
107
27
108
def set_tracing_context(context) do
28
109
Tracker.store(self(), context: context)
29
110
end
 
@@ -87,6 +168,23 @@ defmodule NewRelic.DistributedTrace do
87
168
Process.put(:nr_current_span, previous)
88
169
end
89
170
171
defp generate_sampling() do
172
case {generate_sample?(), generate_priority()} do
173
{true, priority} -> {priority + 1, true}
174
{false, priority} -> {priority, false}
175
end
176
end
177
178
defp generate_sample?() do
179
NewRelic.DistributedTrace.BackoffSampler.sample?()
180
end
181
182
defp generate_priority, do: :rand.uniform() |> Float.round(6)
183
184
def assign_transaction_guid(context) do
185
Map.put(context, :guid, generate_guid())
186
end
187
90
188
def generate_guid(), do: :crypto.strong_rand_bytes(8) |> Base.encode16() |> String.downcase()
91
189
def generate_guid(pid: pid), do: encode_guid([pid, node()])
92
190
def generate_guid(pid: pid, label: label, ref: ref), do: encode_guid([label, ref, pid, node()])
 
@@ -106,4 +204,8 @@ defmodule NewRelic.DistributedTrace do
106
204
|> String.slice(0..4)
107
205
|> String.downcase()
108
206
end
207
208
defp transport_duration(context_start_time) do
209
(System.system_time(:millisecond) - context_start_time) / 1_000
210
end
109
211
end
changed lib/new_relic/distributed_trace/plug.ex
 
@@ -29,10 +29,7 @@ defmodule NewRelic.DistributedTrace.Plug do
29
29
30
30
def trace(conn, true) do
31
31
determine_context(conn)
32
- |> assign_transaction_guid
33
- |> report_attributes
34
- |> convert_to_outbound
35
- |> DistributedTrace.set_tracing_context()
32
|> DistributedTrace.track_transaction(transport_type: "HTTP")
36
33
37
34
conn
38
35
|> put_private(:newrelic_dt_instrumented, true)
 
@@ -49,84 +46,10 @@ defmodule NewRelic.DistributedTrace.Plug do
49
46
%Context{} = context <- restrict_access(context) do
50
47
context
51
48
else
52
- _ -> generate_new_context()
49
_ -> DistributedTrace.generate_new_context()
53
50
end
54
51
end
55
52
56
- defp assign_transaction_guid(context) do
57
- Map.put(context, :guid, DistributedTrace.generate_guid())
58
- end
59
-
60
- defp report_attributes(%Context{parent_id: nil} = context) do
61
- [
62
- guid: context.guid,
63
- traceId: context.guid,
64
- priority: context.priority,
65
- sampled: context.sampled
66
- ]
67
- |> NewRelic.add_attributes()
68
-
69
- context
70
- end
71
-
72
- defp report_attributes(context) do
73
- [
74
- "parent.type": context.type,
75
- "parent.app": context.app_id,
76
- "parent.account": context.account_id,
77
- "parent.transportType": "HTTP",
78
- "parent.transportDuration": transport_duration(context.timestamp),
79
- parentId: context.parent_id,
80
- parentSpanId: context.span_guid,
81
- guid: context.guid,
82
- traceId: context.trace_id,
83
- priority: context.priority,
84
- sampled: context.sampled
85
- ]
86
- |> NewRelic.add_attributes()
87
-
88
- context
89
- end
90
-
91
- defp convert_to_outbound(%Context{parent_id: nil} = context) do
92
- %Context{
93
- account_id: AgentRun.account_id(),
94
- app_id: AgentRun.primary_application_id(),
95
- parent_id: nil,
96
- trust_key: context.trust_key,
97
- guid: context.guid,
98
- trace_id: context.guid,
99
- priority: context.priority,
100
- sampled: context.sampled
101
- }
102
- end
103
-
104
- defp convert_to_outbound(%Context{} = context) do
105
- %Context{
106
- account_id: AgentRun.account_id(),
107
- app_id: AgentRun.primary_application_id(),
108
- parent_id: context.guid,
109
- trust_key: context.trust_key,
110
- guid: context.guid,
111
- trace_id: context.trace_id,
112
- priority: context.priority,
113
- sampled: context.sampled
114
- }
115
- end
116
-
117
- defp generate_new_context() do
118
- {priority, sampled} = generate_sampling()
119
-
120
- %Context{
121
- account_id: AgentRun.account_id(),
122
- app_id: AgentRun.primary_application_id(),
123
- trust_key: AgentRun.trusted_account_key(),
124
- priority: priority,
125
- sampled: sampled
126
- }
127
- end
128
-
129
- @doc false
130
53
def restrict_access(context) do
131
54
if (context.trust_key || context.account_id) == AgentRun.trusted_account_key() do
132
55
context
 
@@ -134,21 +57,4 @@ defmodule NewRelic.DistributedTrace.Plug do
134
57
:restricted
135
58
end
136
59
end
137
-
138
- defp generate_sampling() do
139
- case {generate_sample?(), generate_priority()} do
140
- {true, priority} -> {priority + 1, true}
141
- {false, priority} -> {priority, false}
142
- end
143
- end
144
-
145
- defp generate_sample?() do
146
- NewRelic.DistributedTrace.BackoffSampler.sample?()
147
- end
148
-
149
- defp generate_priority, do: :rand.uniform() |> Float.round(6)
150
-
151
- defp transport_duration(context_start_time) do
152
- (System.system_time(:millisecond) - context_start_time) / 1_000
153
- end
154
60
end
changed lib/new_relic/error/event.ex
 
@@ -8,9 +8,8 @@ defmodule NewRelic.Error.Event do
8
8
duration: nil,
9
9
queue_duration: nil,
10
10
database_duration: nil,
11
- http_response_code: nil,
12
- request_method: nil,
13
- user_attributes: %{}
11
user_attributes: %{},
12
agent_attributes: %{}
14
13
15
14
@moduledoc false
16
15
 
@@ -32,10 +31,21 @@ defmodule NewRelic.Error.Event do
32
31
databaseDuration: error.database_duration
33
32
},
34
33
NewRelic.Util.Event.process_event(error.user_attributes),
35
- _agent_attributes = %{
36
- httpResponseCode: error.http_response_code,
37
- "request.headers.method": error.request_method
38
- }
34
format_agent_attributes(error.agent_attributes)
39
35
]
40
36
end
37
38
def format_agent_attributes(%{
39
http_response_code: http_response_code,
40
request_method: request_method
41
}) do
42
%{
43
httpResponseCode: http_response_code,
44
"request.headers.method": request_method
45
}
46
end
47
48
def format_agent_attributes(_agent_attributes) do
49
%{}
50
end
41
51
end
changed lib/new_relic/error/reporter.ex
 
@@ -30,7 +30,7 @@ defmodule NewRelic.Error.Reporter do
30
30
message: exception_reason,
31
31
expected: expected,
32
32
stack_trace: exception_stacktrace,
33
- transaction_name: "WebTransaction/Elixir/ElixirProcess//#{process_name}",
33
transaction_name: "OtherTransaction/Elixir/ElixirProcess//#{process_name}",
34
34
user_attributes:
35
35
Map.merge(automatic_attributes, %{
36
36
process: process_name
 
@@ -42,7 +42,7 @@ defmodule NewRelic.Error.Reporter do
42
42
error_class: inspect(exception_type),
43
43
error_message: exception_reason,
44
44
expected: expected,
45
- transaction_name: "WebTransaction/Elixir/ElixirProcess//#{process_name}",
45
transaction_name: "OtherTransaction/Elixir/ElixirProcess//#{process_name}",
46
46
user_attributes:
47
47
Map.merge(automatic_attributes, %{
48
48
process: process_name,
changed lib/new_relic/error/trace.ex
 
@@ -6,7 +6,7 @@ defmodule NewRelic.Error.Trace do
6
6
error_type: nil,
7
7
cat_guid: "",
8
8
stack_trace: nil,
9
- request_uri: nil,
9
agent_attributes: %{},
10
10
user_attributes: %{}
11
11
12
12
@moduledoc false
 
@@ -23,8 +23,7 @@ defmodule NewRelic.Error.Trace do
23
23
error.error_type,
24
24
%{
25
25
stack_trace: error.stack_trace,
26
- request_uri: error.request_uri,
27
- agentAttributes: %{},
26
agentAttributes: format_agent_attributes(error.agent_attributes),
28
27
userAttributes: format_user_attributes(error.user_attributes),
29
28
intrinsics: %{"error.expected": error.expected}
30
29
},
 
@@ -32,6 +31,14 @@ defmodule NewRelic.Error.Trace do
32
31
]
33
32
end
34
33
34
defp format_agent_attributes(%{request_uri: request_uri}) do
35
%{request_uri: request_uri}
36
end
37
38
defp format_agent_attributes(_agent_attributes) do
39
%{}
40
end
41
35
42
defp format_user_attributes(attrs) do
36
43
Enum.into(attrs, %{}, fn {k, v} ->
37
44
(String.Chars.impl_for(v) && {k, v}) || {k, inspect(v)}
changed lib/new_relic/harvest/collector/agent_run.ex
 
@@ -78,6 +78,8 @@ defmodule NewRelic.Harvest.Collector.AgentRun do
78
78
store(:span_event_harvest_cycle, span_event["report_period_in_seconds"] * 1000)
79
79
80
80
store(:data_report_period, state["data_report_period"] * 1000)
81
82
store(:apdex_t, state["apdex_t"])
81
83
end
82
84
83
85
defp store_agent_run(_error), do: :ignore
changed lib/new_relic/harvest/collector/metric_data.ex
 
@@ -33,6 33,26 @@ defmodule NewRelic.Harvest.Collector.MetricData do
33
33
}
34
34
]
35
35
36
def transform({:other_transaction, name}, duration_s: duration_s),
37
do: [
38
%Metric{
39
name: :"OtherTransaction/all",
40
call_count: 1,
41
total_call_time: duration_s,
42
total_exclusive_time: duration_s,
43
min_call_time: duration_s,
44
max_call_time: duration_s
45
},
46
%Metric{
47
name: join(["OtherTransaction", name]),
48
call_count: 1,
49
total_call_time: duration_s,
50
total_exclusive_time: duration_s,
51
min_call_time: duration_s,
52
max_call_time: duration_s
53
}
54
]
55
36
56
def transform(
37
57
{:caller, parent_type, parent_account_id, parent_app_id, transport_type},
38
58
duration_s: duration_s
 
@@ -124,6 +144,15 @@ defmodule NewRelic.Harvest.Collector.MetricData do
124
144
total_call_time: utilization
125
145
}
126
146
147
def transform(:apdex, apdex: :satisfying, threshold: t),
148
do: %Metric{name: :Apdex, call_count: 1, min_call_time: t, max_call_time: t}
149
150
def transform(:apdex, apdex: :tolerating, threshold: t),
151
do: %Metric{name: :Apdex, total_call_time: 1, min_call_time: t, max_call_time: t}
152
153
def transform(:apdex, apdex: :frustrating, threshold: t),
154
do: %Metric{name: :Apdex, total_exclusive_time: 1, min_call_time: t, max_call_time: t}
155
127
156
def transform({:supportability, :error_event}, error_count: error_count),
128
157
do: [
129
158
%Metric{
 
@@ -168,12 +197,5 @@ defmodule NewRelic.Harvest.Collector.MetricData do
168
197
}
169
198
]
170
199
171
- defp join(segments) when is_list(segments) do
172
- segments
173
- |> Enum.filter(& &1)
174
- |> Enum.map(&to_string/1)
175
- |> Enum.map(&String.replace_leading(&1, "/", ""))
176
- |> Enum.map(&String.replace_trailing(&1, "/", ""))
177
- |> Enum.join("/")
178
- end
200
defp join(segments), do: NewRelic.Util.metric_join(segments)
179
201
end
changed lib/new_relic/transaction.ex
 
@@ -45,6 +45,14 @@ defmodule NewRelic.Transaction do
45
45
NewRelic.DistributedTrace.Tracker.cleanup(self())
46
46
NewRelic.Transaction.Plug.add_stop_attrs(conn)
47
47
NewRelic.Transaction.Reporter.fail(error)
48
- NewRelic.Transaction.Reporter.stop(conn)
48
NewRelic.Transaction.Reporter.complete()
49
end
50
51
@doc false
52
def start_transaction(category, name) do
53
NewRelic.Transaction.Reporter.start_other_transaction(category, name)
54
55
NewRelic.DistributedTrace.generate_new_context()
56
|> NewRelic.DistributedTrace.track_transaction(transport_type: "Other")
49
57
end
50
58
end
added lib/new_relic/transaction/complete.ex
 
@@ -0,0 +1,484 @@
1
defmodule NewRelic.Transaction.Complete do
2
@moduledoc false
3
4
alias NewRelic.Util
5
alias NewRelic.Harvest.Collector
6
alias NewRelic.DistributedTrace
7
alias NewRelic.Transaction
8
9
def run(tx_attrs, pid) do
10
{tx_segments, tx_attrs, tx_error, span_events, apdex} = gather_transaction_info(tx_attrs, pid)
11
12
report_transaction_event(tx_attrs)
13
report_transaction_trace(tx_attrs, tx_segments)
14
report_transaction_error_event(tx_attrs, tx_error)
15
report_transaction_metric(tx_attrs)
16
report_aggregate(tx_attrs)
17
report_caller_metric(tx_attrs)
18
report_apdex_metric(apdex)
19
report_span_events(span_events)
20
end
21
22
defp gather_transaction_info(tx_attrs, pid) do
23
tx_attrs
24
|> transform_name_attrs
25
|> transform_time_attrs
26
|> extract_transaction_info(pid)
27
end
28
29
defp transform_name_attrs(%{custom_name: name} = tx), do: Map.put(tx, :name, name)
30
defp transform_name_attrs(%{framework_name: name} = tx), do: Map.put(tx, :name, name)
31
defp transform_name_attrs(%{plug_name: name} = tx), do: Map.put(tx, :name, name)
32
defp transform_name_attrs(%{other_transaction_name: name} = tx), do: Map.put(tx, :name, name)
33
34
defp transform_time_attrs(
35
%{start_time: start_time, end_time_mono: end_time_mono, start_time_mono: start_time_mono} =
36
tx
37
) do
38
start_time = System.convert_time_unit(start_time, :native, :millisecond)
39
duration_us = System.convert_time_unit(end_time_mono - start_time_mono, :native, :microsecond)
40
duration_ms = System.convert_time_unit(end_time_mono - start_time_mono, :native, :millisecond)
41
42
tx
43
|> Map.drop([:start_time_mono, :end_time_mono])
44
|> Map.merge(%{
45
start_time: start_time,
46
end_time: start_time + duration_ms,
47
duration_us: duration_us,
48
duration_ms: duration_ms,
49
duration_s: duration_ms / 1000
50
})
51
end
52
53
defp extract_transaction_info(tx_attrs, pid) do
54
{function_segments, tx_attrs} = Map.pop(tx_attrs, :trace_function_segments, [])
55
{process_spawns, tx_attrs} = Map.pop(tx_attrs, :trace_process_spawns, [])
56
{process_names, tx_attrs} = Map.pop(tx_attrs, :trace_process_names, [])
57
{process_exits, tx_attrs} = Map.pop(tx_attrs, :trace_process_exits, [])
58
{tx_error, tx_attrs} = Map.pop(tx_attrs, :transaction_error, nil)
59
60
apdex = calculate_apdex(tx_attrs, tx_error)
61
62
tx_attrs =
63
tx_attrs
64
|> Map.merge(NewRelic.Config.automatic_attributes())
65
|> Map.put(:"nr.apdexPerfZone", Util.Apdex.label(apdex))
66
67
function_segments =
68
function_segments
69
|> Enum.map(&transform_time_attrs/1)
70
|> Enum.map(&transform_trace_time_attrs(&1, tx_attrs.start_time))
71
|> Enum.map(&transform_trace_name_attrs/1)
72
|> Enum.map(&struct(Transaction.Trace.Segment, &1))
73
|> Enum.group_by(& &1.pid)
74
|> Enum.into(%{}, &generate_process_segment_tree(&1))
75
76
top_segment =
77
tx_attrs
78
|> Map.take([:name, :pid, :start_time, :end_time])
79
|> List.wrap()
80
|> Enum.map(&transform_trace_time_attrs(&1, tx_attrs.start_time))
81
|> Enum.map(&transform_trace_name_attrs/1)
82
|> Enum.map(&struct(Transaction.Trace.Segment, &1))
83
|> List.first()
84
|> Map.put(:id, pid)
85
86
top_segment =
87
process_spawns
88
|> collect_process_segments(process_names, process_exits)
89
|> Enum.map(&transform_trace_time_attrs(&1, tx_attrs.start_time))
90
|> Enum.map(&transform_trace_name_attrs/1)
91
|> Enum.map(&struct(Transaction.Trace.Segment, &1))
92
|> Enum.sort_by(& &1.relative_start_time)
93
|> Enum.map(&Map.put(&1, :children, function_segments[&1.pid] || []))
94
|> generate_process_tree(root: top_segment)
95
96
top_children = List.wrap(function_segments[inspect(pid)])
97
top_segment = Map.update!(top_segment, :children, &(&1 ++ top_children))
98
99
span_events = extract_span_events(tx_attrs, pid, process_spawns, process_names, process_exits)
100
101
{[top_segment], tx_attrs, tx_error, span_events, apdex}
102
end
103
104
defp extract_span_events(tx_attrs, pid, spawns, names, exits) do
105
spawned_process_span_events(tx_attrs, spawns, names, exits)
106
|> add_root_process_span_event(tx_attrs, pid)
107
end
108
109
defp calculate_apdex(%{other_transaction_name: _}, _error) do
110
:ignore
111
end
112
113
defp calculate_apdex(_tx_attrs, {:error, _error}) do
114
:frustrating
115
end
116
117
defp calculate_apdex(%{duration_s: duration_s}, nil) do
118
Util.Apdex.calculate(duration_s, apdex_t())
119
end
120
121
defp add_root_process_span_event(spans, %{sampled: true} = tx_attrs, pid) do
122
[
123
%NewRelic.Span.Event{
124
trace_id: tx_attrs[:traceId],
125
transaction_id: tx_attrs[:guid],
126
sampled: true,
127
priority: tx_attrs[:priority],
128
category: "generic",
129
name: "Transaction Root Process #{inspect(pid)}",
130
guid: DistributedTrace.generate_guid(pid: pid),
131
parent_id: tx_attrs[:parentSpanId],
132
timestamp: tx_attrs[:start_time],
133
duration: tx_attrs[:duration_s],
134
entry_point: true
135
}
136
| spans
137
]
138
end
139
140
defp add_root_process_span_event(spans, _tx_attrs, _pid), do: spans
141
142
defp spawned_process_span_events(tx_attrs, process_spawns, process_names, process_exits) do
143
process_spawns
144
|> collect_process_segments(process_names, process_exits)
145
|> Enum.map(&transform_trace_name_attrs/1)
146
|> Enum.map(fn proc ->
147
%NewRelic.Span.Event{
148
trace_id: tx_attrs[:traceId],
149
transaction_id: tx_attrs[:guid],
150
sampled: tx_attrs[:sampled],
151
priority: tx_attrs[:priority],
152
category: "generic",
153
name: "Process #{proc.name || proc.pid}",
154
guid: DistributedTrace.generate_guid(pid: proc.id),
155
parent_id: DistributedTrace.generate_guid(pid: proc.parent_id),
156
timestamp: proc[:start_time],
157
duration: (proc[:end_time] - proc[:start_time]) / 1000
158
}
159
end)
160
end
161
162
defp collect_process_segments(spawns, names, exits) do
163
for {pid, start_time, original} <- spawns,
164
{^pid, name} <- names,
165
{^pid, end_time} <- exits do
166
%{
167
pid: inspect(pid),
168
id: pid,
169
parent_id: original,
170
name: name,
171
start_time: start_time,
172
end_time: end_time
173
}
174
end
175
end
176
177
defp transform_trace_time_attrs(
178
%{start_time: start_time, end_time: end_time} = attrs,
179
trace_start_time
180
) do
181
attrs
182
|> Map.merge(%{
183
relative_start_time: start_time - trace_start_time,
184
relative_end_time: end_time - trace_start_time
185
})
186
end
187
188
defp transform_trace_name_attrs(
189
%{
190
primary_name: metric_name,
191
secondary_name: class_name,
192
attributes: attributes
193
} = attrs
194
) do
195
attrs
196
|> Map.merge(%{
197
class_name: class_name,
198
method_name: nil,
199
metric_name: metric_name |> String.replace("/", ""),
200
attributes: attributes
201
})
202
end
203
204
defp transform_trace_name_attrs(
205
%{
206
module: module,
207
function: function,
208
arity: arity,
209
args: args
210
} = attrs
211
) do
212
attrs
213
|> Map.merge(%{
214
class_name: "#{function}/#{arity}",
215
method_name: nil,
216
metric_name: "#{inspect(module)}.#{function}",
217
attributes: %{query: inspect(args, charlists: false)}
218
})
219
end
220
221
defp transform_trace_name_attrs(%{pid: pid, name: name} = attrs) do
222
attrs
223
|> Map.merge(%{class_name: name || "Process", method_name: nil, metric_name: pid})
224
end
225
226
defp generate_process_tree(processes, root: root) do
227
parent_map = Enum.group_by(processes, & &1.parent_id)
228
generate_tree(root, parent_map)
229
end
230
231
defp generate_process_segment_tree({pid, segments}) do
232
parent_map = Enum.group_by(segments, & &1.parent_id)
233
%{children: children} = generate_tree(%{id: :root}, parent_map)
234
{pid, children}
235
end
236
237
defp generate_tree(leaf, parent_map) when map_size(parent_map) == 0 do
238
leaf
239
end
240
241
defp generate_tree(parent, parent_map) do
242
{children, parent_map} = Map.pop(parent_map, parent.id, [])
243
244
children =
245
children
246
|> Enum.sort_by(& &1.relative_start_time)
247
|> Enum.map(&generate_tree(&1, parent_map))
248
249
Map.update(parent, :children, children, &(&1 ++ children))
250
end
251
252
defp report_caller_metric(
253
%{
254
"parent.type": parent_type,
255
"parent.account": parent_account_id,
256
"parent.app": parent_app_id,
257
"parent.transportType": transport_type
258
} = tx_attrs
259
) do
260
NewRelic.report_metric(
261
{:caller, parent_type, parent_account_id, parent_app_id, transport_type},
262
duration_s: tx_attrs.duration_s
263
)
264
end
265
266
defp report_caller_metric(tx_attrs) do
267
NewRelic.report_metric(
268
{:caller, "Unknown", "Unknown", "Unknown", "Unknown"},
269
duration_s: tx_attrs.duration_s
270
)
271
end
272
273
defp report_span_events(span_events) do
274
Enum.each(span_events, &Collector.SpanEvent.Harvester.report_span_event/1)
275
end
276
277
defp report_transaction_event(%{transaction_type: :web} = tx_attrs) do
278
Collector.TransactionEvent.Harvester.report_event(%Transaction.Event{
279
timestamp: tx_attrs.start_time,
280
duration: tx_attrs.duration_s,
281
name: Util.metric_join(["WebTransaction", tx_attrs.name]),
282
user_attributes:
283
Map.merge(tx_attrs, %{
284
request_url: "#{tx_attrs.host}#{tx_attrs.path}"
285
})
286
})
287
end
288
289
defp report_transaction_event(tx_attrs) do
290
Collector.TransactionEvent.Harvester.report_event(%Transaction.Event{
291
timestamp: tx_attrs.start_time,
292
duration: tx_attrs.duration_s,
293
name: Util.metric_join(["OtherTransaction", tx_attrs.name]),
294
user_attributes: tx_attrs
295
})
296
end
297
298
defp report_transaction_trace(%{other_transaction_name: _} = tx_attrs, tx_segments) do
299
Collector.TransactionTrace.Harvester.report_trace(%Transaction.Trace{
300
start_time: tx_attrs.start_time,
301
metric_name: Util.metric_join(["OtherTransaction", tx_attrs.name]),
302
request_url: "/Unknown",
303
attributes: %{agentAttributes: tx_attrs},
304
segments: tx_segments,
305
duration: tx_attrs.duration_ms
306
})
307
end
308
309
defp report_transaction_trace(tx_attrs, tx_segments) do
310
Collector.TransactionTrace.Harvester.report_trace(%Transaction.Trace{
311
start_time: tx_attrs.start_time,
312
metric_name: Util.metric_join(["WebTransaction", tx_attrs.name]),
313
request_url: "#{tx_attrs.host}#{tx_attrs.path}",
314
attributes: %{agentAttributes: tx_attrs},
315
segments: tx_segments,
316
duration: tx_attrs.duration_ms
317
})
318
end
319
320
defp report_transaction_error_event(_tx_attrs, nil), do: :ignore
321
322
defp report_transaction_error_event(tx_attrs, {:error, error}) do
323
attributes = Map.drop(tx_attrs, [:error, :error_kind, :error_reason, :error_stack])
324
expected = parse_error_expected(error.reason)
325
326
{exception_type, exception_reason, exception_stacktrace} =
327
Util.Error.normalize(error.reason, error.stack)
328
329
report_error_trace(
330
tx_attrs,
331
exception_type,
332
exception_reason,
333
expected,
334
exception_stacktrace,
335
attributes,
336
error
337
)
338
339
report_error_event(
340
tx_attrs,
341
exception_type,
342
exception_reason,
343
expected,
344
exception_stacktrace,
345
attributes,
346
error
347
)
348
349
unless expected do
350
NewRelic.report_metric({:supportability, :error_event}, error_count: 1)
351
NewRelic.report_metric(:error, error_count: 1)
352
end
353
end
354
355
defp report_error_trace(
356
%{other_transaction_name: _} = tx_attrs,
357
exception_type,
358
exception_reason,
359
expected,
360
exception_stacktrace,
361
attributes,
362
error
363
) do
364
Collector.ErrorTrace.Harvester.report_error(%NewRelic.Error.Trace{
365
timestamp: tx_attrs.start_time / 1_000,
366
error_type: inspect(exception_type),
367
message: exception_reason,
368
expected: expected,
369
stack_trace: exception_stacktrace,
370
transaction_name: Util.metric_join(["OtherTransaction", tx_attrs.name]),
371
agent_attributes: %{},
372
user_attributes: Map.merge(attributes, %{process: error[:process]})
373
})
374
end
375
376
defp report_error_trace(
377
tx_attrs,
378
exception_type,
379
exception_reason,
380
expected,
381
exception_stacktrace,
382
attributes,
383
error
384
) do
385
Collector.ErrorTrace.Harvester.report_error(%NewRelic.Error.Trace{
386
timestamp: tx_attrs.start_time / 1_000,
387
error_type: inspect(exception_type),
388
message: exception_reason,
389
expected: expected,
390
stack_trace: exception_stacktrace,
391
transaction_name: Util.metric_join(["WebTransaction", tx_attrs.name]),
392
agent_attributes: %{
393
request_uri: "#{tx_attrs.host}#{tx_attrs.path}"
394
},
395
user_attributes: Map.merge(attributes, %{process: error[:process]})
396
})
397
end
398
399
defp report_error_event(
400
%{other_transaction_name: _} = tx_attrs,
401
exception_type,
402
exception_reason,
403
expected,
404
exception_stacktrace,
405
attributes,
406
error
407
) do
408
Collector.TransactionErrorEvent.Harvester.report_error(%NewRelic.Error.Event{
409
timestamp: tx_attrs.start_time / 1_000,
410
error_class: inspect(exception_type),
411
error_message: exception_reason,
412
expected: expected,
413
transaction_name: Util.metric_join(["OtherTransaction", tx_attrs.name]),
414
agent_attributes: %{},
415
user_attributes:
416
Map.merge(attributes, %{
417
process: error[:process],
418
stacktrace: Enum.join(exception_stacktrace, "\n")
419
})
420
})
421
end
422
423
defp report_error_event(
424
tx_attrs,
425
exception_type,
426
exception_reason,
427
expected,
428
exception_stacktrace,
429
attributes,
430
error
431
) do
432
Collector.TransactionErrorEvent.Harvester.report_error(%NewRelic.Error.Event{
433
timestamp: tx_attrs.start_time / 1_000,
434
error_class: inspect(exception_type),
435
error_message: exception_reason,
436
expected: expected,
437
transaction_name: Util.metric_join(["WebTransaction", tx_attrs.name]),
438
agent_attributes: %{
439
http_response_code: tx_attrs.status,
440
request_method: tx_attrs.request_method
441
},
442
user_attributes:
443
Map.merge(attributes, %{
444
process: error[:process],
445
stacktrace: Enum.join(exception_stacktrace, "\n")
446
})
447
})
448
end
449
450
defp report_aggregate(%{other_transaction_name: _} = tx) do
451
NewRelic.report_aggregate(%{type: :OtherTransaction, name: tx[:name]}, %{
452
duration_us: tx.duration_us,
453
duration_ms: tx.duration_ms,
454
call_count: 1
455
})
456
end
457
458
defp report_aggregate(tx) do
459
NewRelic.report_aggregate(%{type: :Transaction, name: tx[:name]}, %{
460
duration_us: tx.duration_us,
461
duration_ms: tx.duration_ms,
462
call_count: 1
463
})
464
end
465
466
def report_transaction_metric(%{other_transaction_name: _} = tx) do
467
NewRelic.report_metric({:other_transaction, tx.name}, duration_s: tx.duration_s)
468
end
469
470
def report_transaction_metric(tx) do
471
NewRelic.report_metric({:transaction, tx.name}, duration_s: tx.duration_s)
472
end
473
474
def report_apdex_metric(:ignore), do: :ignore
475
476
def report_apdex_metric(apdex) do
477
NewRelic.report_metric(:apdex, apdex: apdex, threshold: apdex_t())
478
end
479
480
def apdex_t, do: Collector.AgentRun.lookup(:apdex_t)
481
482
defp parse_error_expected(%{expected: true}), do: true
483
defp parse_error_expected(_), do: false
484
end
changed lib/new_relic/transaction/monitor.ex
 
@@ -1,6 +1,7 @@
1
1
defmodule NewRelic.Transaction.Monitor do
2
2
use GenServer
3
3
alias NewRelic.Transaction
4
alias NewRelic.DistributedTrace
4
5
5
6
# This GenServer watches transaction processes for
6
7
# - :trace messages
 
@@ -72,6 73,8 @@ defmodule NewRelic.Transaction.Monitor do
72
73
73
74
def handle_info({:DOWN, _ref, :process, pid, _reason}, state) do
74
75
Transaction.Reporter.ensure_purge(pid)
76
Transaction.Reporter.complete(pid)
77
DistributedTrace.Tracker.cleanup(pid)
75
78
{:noreply, %{state | pids: Map.delete(state.pids, pid)}}
76
79
end
changed lib/new_relic/transaction/plug.ex
 
@@ -38,7 38,7 @@ defmodule NewRelic.Transaction.Plug do
38
38
39
39
defp before_send(conn) do
40
40
add_stop_attrs(conn)
41
- Transaction.Reporter.stop(conn)
41
Transaction.Reporter.complete()
42
42
conn
43
43
end
44
44
 
@@ -60,6 60,7 @@ defmodule NewRelic.Transaction.Plug do
60
60
info = Process.info(self(), [:memory, :reductions])
61
61
62
62
[
63
plug_name: plug_name(conn),
63
64
status: conn.status,
64
65
memory_kb: info[:memory] / @kb,
65
66
reductions: info[:reductions]
changed lib/new_relic/transaction/reporter.ex
 
@@ -1,11 1,8 @@
1
1
defmodule NewRelic.Transaction.Reporter do
2
2
use GenServer
3
3
4
- alias NewRelic.Util
5
4
alias NewRelic.Util.AttrStore
6
5
alias NewRelic.Transaction
7
- alias NewRelic.Harvest.Collector
8
- alias NewRelic.DistributedTrace
9
6
10
7
# This GenServer collects and reports Transaction related data
11
8
# - Transaction Events
 
@@ -51,6 48,13 @@ defmodule NewRelic.Transaction.Reporter do
51
48
)
52
49
end
53
50
51
def start_other_transaction(category, name) do
52
unless tracking?(self()) do
53
start()
54
AttrStore.add(__MODULE__, self(), other_transaction_name: "#{category}/#{name}")
55
end
56
end
57
54
58
def fail(%{kind: kind, reason: reason, stack: stack} = error) do
55
59
if tracking?(self()) do
56
60
if NewRelic.Config.feature?(:error_collector) do
 
@@ -67,15 71,6 @@ defmodule NewRelic.Transaction.Reporter do
67
71
end
68
72
end
69
73
70
- def stop(%Plug.Conn{} = conn) do
71
- add_attributes(
72
- plug_name: Transaction.Plug.plug_name(conn),
73
- end_time_mono: System.monotonic_time()
74
- )
75
-
76
- complete()
77
- end
78
-
79
74
def add_trace_segment(segment) do
80
75
if tracking?(self()) do
81
76
AttrStore.add(__MODULE__, self(), trace_function_segments: {:list, segment})
 
@@ -90,8 85,12 @@ defmodule NewRelic.Transaction.Reporter do
90
85
91
86
def complete(pid \\ self()) do
92
87
if tracking?(pid) do
88
AttrStore.add(__MODULE__, pid, end_time_mono: System.monotonic_time())
89
AttrStore.untrack(__MODULE__, pid)
90
93
91
Task.Supervisor.start_child(NewRelic.Transaction.TaskSupervisor, fn ->
94
- complete_transaction(pid)
92
AttrStore.collect(__MODULE__, pid)
93
|> Transaction.Complete.run(pid)
95
94
end)
96
95
end
97
96
end
 
@@ -149,337 148,4 @@ defmodule NewRelic.Transaction.Reporter do
149
148
def tracking?(pid), do: AttrStore.tracking?(__MODULE__, pid)
150
149
151
150
def root(pid), do: AttrStore.find_root(__MODULE__, pid)
152
-
153
- def complete_transaction(pid) do
154
- AttrStore.untrack(__MODULE__, pid)
155
- tx_attrs = AttrStore.collect(__MODULE__, pid)
156
-
157
- {tx_segments, tx_attrs, tx_error, span_events} = gather_transaction_info(tx_attrs, pid)
158
- tx_attrs = Map.merge(tx_attrs, NewRelic.Config.automatic_attributes())
159
-
160
- report_transaction_event(tx_attrs)
161
- report_transaction_trace(tx_attrs, tx_segments)
162
- report_transaction_error_event(tx_attrs, tx_error)
163
- report_transaction_metric(tx_attrs)
164
- report_aggregate(tx_attrs)
165
- report_caller_metric(tx_attrs)
166
- report_span_events(span_events)
167
- end
168
-
169
- defp gather_transaction_info(tx_attrs, pid) do
170
- tx_attrs
171
- |> transform_name_attrs
172
- |> transform_time_attrs
173
- |> extract_transaction_info(pid)
174
- end
175
-
176
- defp transform_time_attrs(
177
- %{start_time: start_time, end_time_mono: end_time_mono, start_time_mono: start_time_mono} =
178
- tx
179
- ),
180
- do:
181
- tx
182
- |> Map.drop([:start_time_mono, :end_time_mono])
183
- |> Map.merge(%{
184
- start_time: System.convert_time_unit(start_time, :native, :millisecond),
185
- end_time:
186
- System.convert_time_unit(
187
- start_time (end_time_mono - start_time_mono),
188
- :native,
189
- :millisecond
190
- ),
191
- duration_us:
192
- System.convert_time_unit(end_time_mono - start_time_mono, :native, :microsecond),
193
- duration_ms:
194
- System.convert_time_unit(end_time_mono - start_time_mono, :native, :millisecond)
195
- })
196
-
197
- defp transform_name_attrs(%{custom_name: name} = tx), do: Map.put(tx, :name, name)
198
- defp transform_name_attrs(%{framework_name: name} = tx), do: Map.put(tx, :name, name)
199
- defp transform_name_attrs(%{plug_name: name} = tx), do: Map.put(tx, :name, name)
200
-
201
- defp extract_transaction_info(tx_attrs, pid) do
202
- {function_segments, tx_attrs} = Map.pop(tx_attrs, :trace_function_segments, [])
203
- {process_spawns, tx_attrs} = Map.pop(tx_attrs, :trace_process_spawns, [])
204
- {process_names, tx_attrs} = Map.pop(tx_attrs, :trace_process_names, [])
205
- {process_exits, tx_attrs} = Map.pop(tx_attrs, :trace_process_exits, [])
206
- {tx_error, tx_attrs} = Map.pop(tx_attrs, :transaction_error, nil)
207
-
208
- span_events =
209
- spawned_process_events(tx_attrs, process_spawns, process_names, process_exits)
210
- |> add_cowboy_process_event(tx_attrs, pid)
211
-
212
- function_segments =
213
- function_segments
214
- |> Enum.map(&transform_time_attrs/1)
215
- |> Enum.map(&transform_trace_time_attrs(&1, tx_attrs.start_time))
216
- |> Enum.map(&transform_trace_name_attrs/1)
217
- |> Enum.map(&struct(Transaction.Trace.Segment, &1))
218
- |> Enum.group_by(& &1.pid)
219
- |> Enum.into(%{}, &generate_process_segment_tree(&1))
220
-
221
- top_segment =
222
- tx_attrs
223
- |> Map.take([:name, :pid, :start_time, :end_time])
224
- |> List.wrap()
225
- |> Enum.map(&transform_trace_time_attrs(&1, tx_attrs.start_time))
226
- |> Enum.map(&transform_trace_name_attrs/1)
227
- |> Enum.map(&struct(Transaction.Trace.Segment, &1))
228
- |> List.first()
229
- |> Map.put(:id, pid)
230
-
231
- top_segment =
232
- process_spawns
233
- |> collect_process_segments(process_names, process_exits)
234
- |> Enum.map(&transform_trace_time_attrs(&1, tx_attrs.start_time))
235
- |> Enum.map(&transform_trace_name_attrs/1)
236
- |> Enum.map(&struct(Transaction.Trace.Segment, &1))
237
- |> Enum.sort_by(& &1.relative_start_time)
238
- |> Enum.map(&Map.put(&1, :children, function_segments[&1.pid] || []))
239
- |> generate_process_tree(root: top_segment)
240
-
241
- top_children = List.wrap(function_segments[inspect(pid)])
242
- top_segment = Map.update!(top_segment, :children, &(&1 top_children))
243
-
244
- {[top_segment], tx_attrs, tx_error, span_events}
245
- end
246
-
247
- defp add_cowboy_process_event(spans, %{sampled: true} = tx_attrs, pid) do
248
- [
249
- %NewRelic.Span.Event{
250
- trace_id: tx_attrs[:traceId],
251
- transaction_id: tx_attrs[:guid],
252
- sampled: true,
253
- priority: tx_attrs[:priority],
254
- category: "generic",
255
- name: "Cowboy Process #{inspect(pid)}",
256
- guid: DistributedTrace.generate_guid(pid: pid),
257
- parent_id: tx_attrs[:parentSpanId],
258
- timestamp: tx_attrs[:start_time],
259
- duration: tx_attrs[:duration_ms] / 1000,
260
- entry_point: true
261
- }
262
- | spans
263
- ]
264
- end
265
-
266
- defp add_cowboy_process_event(spans, _tx_attrs, _pid), do: spans
267
-
268
- defp spawned_process_events(tx_attrs, process_spawns, process_names, process_exits) do
269
- process_spawns
270
- |> collect_process_segments(process_names, process_exits)
271
- |> Enum.map(&transform_trace_name_attrs/1)
272
- |> Enum.map(fn proc ->
273
- %NewRelic.Span.Event{
274
- trace_id: tx_attrs[:traceId],
275
- transaction_id: tx_attrs[:guid],
276
- sampled: tx_attrs[:sampled],
277
- priority: tx_attrs[:priority],
278
- category: "generic",
279
- name: "Process #{proc.name || proc.pid}",
280
- guid: DistributedTrace.generate_guid(pid: proc.id),
281
- parent_id: DistributedTrace.generate_guid(pid: proc.parent_id),
282
- timestamp: proc[:start_time],
283
- duration: (proc[:end_time] - proc[:start_time]) / 1000
284
- }
285
- end)
286
- end
287
-
288
- defp collect_process_segments(spawns, names, exits) do
289
- for {pid, start_time, original} <- spawns,
290
- {^pid, name} <- names,
291
- {^pid, end_time} <- exits do
292
- %{
293
- pid: inspect(pid),
294
- id: pid,
295
- parent_id: original,
296
- name: name,
297
- start_time: start_time,
298
- end_time: end_time
299
- }
300
- end
301
- end
302
-
303
- defp transform_trace_time_attrs(
304
- %{start_time: start_time, end_time: end_time} = attrs,
305
- trace_start_time
306
- ),
307
- do:
308
- attrs
309
- |> Map.merge(%{
310
- relative_start_time: start_time - trace_start_time,
311
- relative_end_time: end_time - trace_start_time
312
- })
313
-
314
- defp transform_trace_name_attrs(
315
- %{
316
- primary_name: metric_name,
317
- secondary_name: class_name,
318
- attributes: attributes
319
- } = attrs
320
- ) do
321
- attrs
322
- |> Map.merge(%{
323
- class_name: class_name,
324
- method_name: nil,
325
- metric_name: metric_name |> String.replace("/", ""),
326
- attributes: attributes
327
- })
328
- end
329
-
330
- defp transform_trace_name_attrs(
331
- %{
332
- module: module,
333
- function: function,
334
- arity: arity,
335
- args: args
336
- } = attrs
337
- ),
338
- do:
339
- attrs
340
- |> Map.merge(%{
341
- class_name: "#{function}/#{arity}",
342
- method_name: nil,
343
- metric_name: "#{inspect(module)}.#{function}",
344
- attributes: %{query: inspect(args, charlists: false)}
345
- })
346
-
347
- defp transform_trace_name_attrs(%{pid: pid, name: name} = attrs),
348
- do:
349
- attrs
350
- |> Map.merge(%{class_name: name || "Process", method_name: nil, metric_name: pid})
351
-
352
- defp generate_process_tree(processes, root: root) do
353
- parent_map = Enum.group_by(processes, & &1.parent_id)
354
- generate_tree(root, parent_map)
355
- end
356
-
357
- defp generate_process_segment_tree({pid, segments}) do
358
- parent_map = Enum.group_by(segments, & &1.parent_id)
359
- %{children: children} = generate_tree(%{id: :root}, parent_map)
360
- {pid, children}
361
- end
362
-
363
- defp generate_tree(leaf, parent_map) when map_size(parent_map) == 0 do
364
- leaf
365
- end
366
-
367
- defp generate_tree(parent, parent_map) do
368
- {children, parent_map} = Map.pop(parent_map, parent.id, [])
369
-
370
- children =
371
- children
372
- |> Enum.sort_by(& &1.relative_start_time)
373
- |> Enum.map(&generate_tree(&1, parent_map))
374
-
375
- Map.update(parent, :children, children, &(&1 children))
376
- end
377
-
378
- defp report_caller_metric(
379
- %{
380
- "parent.type": parent_type,
381
- "parent.account": parent_account_id,
382
- "parent.app": parent_app_id,
383
- "parent.transportType": transport_type
384
- } = tx_attrs
385
- ) do
386
- NewRelic.report_metric(
387
- {:caller, parent_type, parent_account_id, parent_app_id, transport_type},
388
- duration_s: tx_attrs[:duration_ms] / 1000
389
- )
390
- end
391
-
392
- defp report_caller_metric(tx_attrs) do
393
- NewRelic.report_metric(
394
- {:caller, "Unknown", "Unknown", "Unknown", "Unknown"},
395
- duration_s: tx_attrs[:duration_ms] / 1000
396
- )
397
- end
398
-
399
- defp report_span_events(span_events) do
400
- Enum.each(span_events, &Collector.SpanEvent.Harvester.report_span_event/1)
401
- end
402
-
403
- defp report_transaction_event(tx_attrs) do
404
- Collector.TransactionEvent.Harvester.report_event(%Transaction.Event{
405
- timestamp: tx_attrs.start_time,
406
- duration: tx_attrs.duration_ms / 1_000,
407
- name: "WebTransaction#{tx_attrs.name}",
408
- user_attributes:
409
- Map.merge(tx_attrs, %{
410
- request_url: "#{tx_attrs.host}#{tx_attrs.path}"
411
- })
412
- })
413
- end
414
-
415
- defp report_transaction_trace(tx_attrs, tx_segments) do
416
- Collector.TransactionTrace.Harvester.report_trace(%Transaction.Trace{
417
- start_time: tx_attrs.start_time,
418
- metric_name: "WebTransaction#{tx_attrs.name}",
419
- request_url: "#{tx_attrs.host}#{tx_attrs.path}",
420
- attributes: %{agentAttributes: tx_attrs},
421
- segments: tx_segments,
422
- duration: tx_attrs.duration_ms
423
- })
424
- end
425
-
426
- defp report_transaction_error_event(_tx_attrs, nil), do: :ignore
427
-
428
- defp report_transaction_error_event(tx_attrs, {:error, error}) do
429
- attributes = Map.drop(tx_attrs, [:error, :error_kind, :error_reason, :error_stack])
430
-
431
- {exception_type, exception_reason, exception_stacktrace} =
432
- Util.Error.normalize(error.reason, error.stack)
433
-
434
- expected = parse_error_expected(error.reason)
435
-
436
- Collector.ErrorTrace.Harvester.report_error(%NewRelic.Error.Trace{
437
- timestamp: tx_attrs.start_time / 1_000,
438
- error_type: inspect(exception_type),
439
- message: exception_reason,
440
- expected: expected,
441
- stack_trace: exception_stacktrace,
442
- transaction_name: "WebTransaction#{tx_attrs.name}",
443
- request_uri: "#{tx_attrs.host}#{tx_attrs.path}",
444
- user_attributes:
445
- Map.merge(attributes, %{
446
- process: error[:process]
447
- })
448
- })
449
-
450
- Collector.TransactionErrorEvent.Harvester.report_error(%NewRelic.Error.Event{
451
- timestamp: tx_attrs.start_time / 1_000,
452
- error_class: inspect(exception_type),
453
- error_message: exception_reason,
454
- expected: expected,
455
- transaction_name: "WebTransaction#{tx_attrs.name}",
456
- http_response_code: tx_attrs.status,
457
- request_method: tx_attrs.request_method,
458
- user_attributes:
459
- Map.merge(attributes, %{
460
- process: error[:process],
461
- stacktrace: Enum.join(exception_stacktrace, "\n")
462
- })
463
- })
464
-
465
- unless expected do
466
- NewRelic.report_metric({:supportability, :error_event}, error_count: 1)
467
- NewRelic.report_metric(:error, error_count: 1)
468
- end
469
- end
470
-
471
- defp report_aggregate(tx) do
472
- NewRelic.report_aggregate(%{type: :Transaction, name: tx[:name]}, %{
473
- duration_us: tx.duration_us,
474
- duration_ms: tx.duration_ms,
475
- call_count: 1
476
- })
477
- end
478
-
479
- def report_transaction_metric(tx) do
480
- NewRelic.report_metric({:transaction, tx.name}, duration_s: tx.duration_ms / 1_000)
481
- end
482
-
483
- defp parse_error_expected(%{expected: true}), do: true
484
- defp parse_error_expected(_), do: false
485
151
end
changed lib/new_relic/util.ex
 
@@ -24,6 24,15 @@ defmodule NewRelic.Util do
24
24
end
25
25
end
26
26
27
def metric_join(segments) when is_list(segments) do
28
segments
29
|> Enum.filter(& &1)
30
|> Enum.map(&to_string/1)
31
|> Enum.map(&String.replace_leading(&1, "/", ""))
32
|> Enum.map(&String.replace_trailing(&1, "/", ""))
33
|> Enum.join("/")
34
end
35
27
36
def deep_flatten(attrs) when is_list(attrs) do
28
37
Enum.flat_map(attrs, &deep_flatten/1)
29
38
end
added lib/new_relic/util/apdex.ex
 
@@ -0,0 1,14 @@
1
defmodule NewRelic.Util.Apdex do
2
@moduledoc false
3
4
# https://en.wikipedia.org/wiki/Apdex
5
6
def calculate(dur, apdex_t) when dur < apdex_t, do: :satisfying
7
def calculate(dur, apdex_t) when dur < apdex_t * 4, do: :tolerating
8
def calculate(_dur, _apdex_t), do: :frustrating
9
10
def label(:satisfying), do: "S"
11
def label(:tolerating), do: "T"
12
def label(:frustrating), do: "F"
13
def label(:ignore), do: nil
14
end