743 Network Delay Time¶
There are N network nodes, labelled 1 to N.
Given times, a list of travel times as directed edges times[i] = (u, v, w), where u is the source node, v is the target node, and w is the time it takes for a signal to travel from source to target.
Now, we send a signal from a certain node K. How long will it take for all nodes to receive the signal? If it is impossible, return -1.
Note:
N will be in the range [1, 100]. K will be in the range [1, N]. The length of times will be in the range [1, 6000]. All edges times[i] = (u, v, w) will have 1 <= u, v <= N and 1 <= w <= 100.
:::tip Idea: Construct the graph and do a shortest path from K to all other nodes. :::
Solution 1 : Bellman-Ford¶
Time complexity: \(O(ne)\) Space complexity: \(O(n)\)
// Runtime: 92 ms
class Solution {
 public:
  // Bellman-Ford from source K: relax every edge N-1 times.
  // dist[v] is the shortest signal time from K to node v (0-indexed);
  // nodes never reached keep the MAX_TIME sentinel.
  int networkDelayTime(std::vector<std::vector<int>>& times, int N, int K) {
    // Sentinel strictly greater than any real path: at most (N-1)*100 = 9900
    // for N <= 100, w <= 100.
    constexpr int MAX_TIME = 101 * 100;
    std::vector<int> dist(N, MAX_TIME);
    dist[K - 1] = 0;
    // A shortest path uses at most N-1 edges, so N-1 full relaxation
    // rounds are sufficient.
    for (int round = 1; round < N; ++round) {
      for (const auto& edge : times) {
        const int src = edge[0] - 1;
        const int dst = edge[1] - 1;
        const int cost = edge[2];
        if (dist[src] + cost < dist[dst]) {
          dist[dst] = dist[src] + cost;
        }
      }
    }
    // The answer is the time until the slowest node hears the signal.
    const int slowest = *std::max_element(dist.begin(), dist.end());
    return slowest == MAX_TIME ? -1 : slowest;
  }
};
Solution 3: Floyd-Warshall¶
Time complexity: \(O(n^3)\) Space complexity: \(O(n^2)\)
V1
// Runtime: 92 ms
class Solution {
 public:
  // Floyd-Warshall all-pairs shortest paths; -1 marks "no edge / unreachable".
  // Returns the largest shortest time from K to any node, or -1 if some node
  // never receives the signal.
  int networkDelayTime(std::vector<std::vector<int>>& times, int N, int K) {
    std::vector<std::vector<int>> d(N, std::vector<int>(N, -1));
    // Keep the cheapest edge when the input contains parallel (u, v) edges;
    // a plain assignment would let the last edge in the list win.
    // Iterate by const reference to avoid copying each edge vector.
    for (const auto& time : times) {
      const int u = time[0] - 1, v = time[1] - 1, w = time[2];
      if (d[u][v] < 0 || w < d[u][v]) d[u][v] = w;
    }
    for (int i = 0; i < N; ++i)
      d[i][i] = 0;
    // Relax through every intermediate node k; only combine legs that exist.
    for (int k = 0; k < N; ++k)
      for (int i = 0; i < N; ++i)
        for (int j = 0; j < N; ++j)
          if (d[i][k] >= 0 && d[k][j] >= 0)
            if (d[i][j] < 0 || d[i][j] > d[i][k] + d[k][j])
              d[i][j] = d[i][k] + d[k][j];
    int ans = INT_MIN;
    for (int i = 0; i < N; ++i) {
      if (d[K - 1][i] < 0) return -1;  // some node is unreachable from K
      ans = std::max(ans, d[K - 1][i]);
    }
    return ans;
  }
};
V2
// Runtime: 89 ms
class Solution {
 public:
  // Floyd-Warshall with a numeric infinity instead of -1 sentinels, which
  // removes the reachability branches from the inner loop.
  // Returns the largest shortest time from K to any node, or -1 if some node
  // never receives the signal.
  int networkDelayTime(std::vector<std::vector<int>>& times, int N, int K) {
    // Sentinel strictly greater than any real path ((N-1)*100 = 9900 for
    // N <= 100, w <= 100); MAX_TIME + MAX_TIME still fits in int.
    constexpr int MAX_TIME = 101 * 100;
    std::vector<std::vector<int>> d(N, std::vector<int>(N, MAX_TIME));
    // Keep the cheapest edge when the input contains parallel (u, v) edges;
    // a plain assignment would let the last edge in the list win.
    // Iterate by const reference to avoid copying each edge vector.
    for (const auto& time : times)
      d[time[0] - 1][time[1] - 1] =
          std::min(d[time[0] - 1][time[1] - 1], time[2]);
    for (int i = 0; i < N; ++i)
      d[i][i] = 0;
    for (int k = 0; k < N; ++k)
      for (int i = 0; i < N; ++i)
        for (int j = 0; j < N; ++j)
          d[i][j] = std::min(d[i][j], d[i][k] + d[k][j]);
    const int max_time = *std::max_element(d[K - 1].begin(), d[K - 1].end());
    return max_time >= MAX_TIME ? -1 : max_time;
  }
};