mod = 1000000007
eps = 10**-9


def main():
    import sys
    input = sys.stdin.buffer.readline

    N, M = map(int, input().split())
    LR0 = []                  # intervals with p == 0
    LR1 = []                  # intervals with p == 1
    imos0 = [0] * (N + 2)     # difference array (imos method) for p == 0 coverage
    imos1 = [0] * (N + 2)     # difference array (imos method) for p == 1 coverage
    for _ in range(M):
        l, r, p = map(int, input().split())
        if p == 0:
            LR0.append((l, r))
            imos0[l] += 1
            imos0[r+1] -= 1
        else:
            LR1.append((l, r))
            imos1[l] += 1
            imos1[r+1] -= 1

    seg0 = [0] * (N+1)        # seg0[i]: number of p == 0 intervals covering position i
    seg1 = [0] * (N+1)        # seg1[i]: number of p == 1 intervals covering position i
    F = 0                     # positions covered by no interval (factor 3 each in the answer)
    N1 = 0                    # positions covered by at least one p == 1 interval
    dp = [0] * (N+1)
    dp[0] = 1
    # Sort by right end descending so the interval ending earliest sits at the tail
    # and can be popped as the sweep reaches its right end.
    LR0.sort(key=lambda x: x[1], reverse=True)
    RmostL = 0                # rightmost left end among p == 0 intervals processed so far
    S = [0] * (N+1)           # running sum of dp, doubled at each p == 0-only position
    cnt = [0] * (N+1)         # cnt[i]: number of positions up to i covered only by p == 0 intervals
    for i in range(N):
        seg0[i+1] = seg0[i] + imos0[i+1]
        seg1[i+1] = seg1[i] + imos1[i+1]
        if seg0[i+1] == seg1[i+1] == 0:
            # Position i+1 is unconstrained; carry everything over.
            F += 1
            dp[i+1] = dp[i]
            cnt[i+1] = cnt[i]
            S[i+1] = S[i]
        elif seg1[i+1] == 0:
            # Position i+1 is covered only by p == 0 intervals.
            # Pop every p == 0 interval whose right end is this position.
            while LR0 and LR0[-1][1] == i+1:
                l, r = LR0.pop()
                RmostL = max(RmostL, l)
            # Modular exponentiation keeps the intermediate power small.
            dp[i+1] = (dp[i] + S[i] - (S[RmostL] * pow(2, cnt[i]+1 - cnt[RmostL], mod)) % mod) % mod
            S[i+1] = (S[i]*2 + dp[i+1]) % mod
            cnt[i+1] = cnt[i] + 1
        else:
            # Position i+1 is covered by at least one p == 1 interval.
            N1 += 1
            dp[i+1] = dp[i]
            cnt[i+1] = cnt[i]
            S[i+1] = S[i]

    # Combine function for the sparse table: minimum.
    def STfunc(a, b):
        if a < b:
            return a
        else:
            return b

    # Queries are 0-indexed over the half-open interval [l, r).
    class SparseTable():
        def __init__(self, A):
            # A: the sequence to preprocess
            self.N = len(A)
            self.K = self.N.bit_length() - 1
            self.table = [0] * (self.N * (self.K + 1))
            for i, a in enumerate(A):
                self.table[i] = a
            for k in range(1, self.K + 1):
                for i in range(self.N):
                    j = i + (1 << (k - 1))
                    if j <= self.N - 1:
                        self.table[i + k * self.N] = STfunc(self.table[i + (k - 1) * self.N],
                                                            self.table[j + (k - 1) * self.N])

        def query(self, l, r):
            # Return the minimum over [l, r).
            k = (r - l).bit_length() - 1
            return STfunc(self.table[l + k * self.N], self.table[r - (1 << k) + k * self.N])

    if seg1:
        # If some remaining p == 0 interval is entirely covered by p == 1 intervals,
        # the constraints are contradictory and the answer is 0.
        ST = SparseTable(seg1)
        for l, r in LR0:
            if ST.query(l, r+1) > 0:
                print(0)
                exit()

    print(((dp[-1] * pow(3, F, mod)) % mod * pow(2, N1 - len(LR1), mod)) % mod)


if __name__ == '__main__':
    main()