绍兴公厕前端
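WorkerManager is the player's worker-coordination module (minified build): it spawns the video/audio decode workers (./static/videoWorker.js, ./static/audioWorker.js), parses the vendor RTP extension headers, remuxes decoded samples into fMP4 segments through mp4Remux/VideoMediaSource for &lt;video&gt;/MSE playback, and draws canvas-mode frames with StreamDrawer.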
var WorkerManager = function () {
    function a() {
        O = !0, o = this
    }
    function b() {
        return X
    }
    function c() {
        null !== z && z(!1)
    }
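    // Message handler for the video worker: routes decoded frames, init segments,
    // media samples and status events coming back from videoWorker.js.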
    function d(b) {
        var c = b.data;
        switch (c.type) {
            case "WorkerReady":
                yb && yb();
                break;
            case "canvasRender":
                k(0, "currentTime"), i(c.data), ub++, 0 === tb && (tb = performance.now());
                break;
            case "initSegment":
                X = c.data, j();
                break;
            case "mediaSample":
                null === Z.samples && (Z.samples = new Array(jb)), null === c.data.frame_time_stamp && (c.data.frameDuration = Math.round(pb / L)), 1 !== ib && (c.data.frameDuration = pb / Math.abs(ib)), Z.samples[$++] = c.data, nb += c.data.frameDuration, ob += c.data.frameDuration, kb = jb;
                break;
            case "videoRender":
                var d = new Uint8Array(c.data.length + _);
                if (0 !== _ && d.set(ab), d.set(c.data, _), ab = d, _ = ab.length, $ % jb === 0 && 0 !== $) {
                    if (null !== Z.samples[0].frameDuration ? (Z.baseMediaDecodeTime = 1 === bb ? 0 : mb, mb = nb) : Z.baseMediaDecodeTime = Math.round(pb / L) * jb * (bb - 1), "chrome" == I && 1 === ib) for (var e = Z.samples.length, f = ob / jb, g = 0; e > g; g++) Z.samples[g].frameDuration = f;
                    ob = 0, Y = mp4Remux.mediaSegment(bb, Z, ab, Z.baseMediaDecodeTime), bb++, $ = 0, ab = null, _ = 0, null !== W ? W.setMediaSegment(Y) : lb === !1 && (debug.log("workerManager::videoMS error!! recreate videoMS"), j()), null !== p && p.stopRendering()
                }
                break;
            case "mediasegmentData":
                W.setMediaSegment(c.data), lb === !1 && (debug.log("videoMS error!! recreate videoMS"), j());
                break;
            case "videoInfo":
                J = c.data;
                break;
            case "time":
                break;
            case "videoTimeStamp":
                if (eb = c.data, null !== W && null !== eb && (W.setvideoTimeStamp(eb), W.getNoWaitFlag() && (W.getDuration() || 0 === W.getDuration()))) {
                    var h = eb.timestamp - parseInt(W.getDuration());
                    C({timestamp: h})
                }
                break;
            case "firstFrame":
                p.startRendering(), "undefined" != typeof p.setFPS && p.setFPS(L);
                break;
            case "drop":
                break;
            case "codecInfo":
                cb = c.data, null !== W && W.setCodecInfo(cb);
                break;
            case "stepPlay":
                switch (c.data) {
                    case "needBuffering":
                        R = !0, w("request", T);
                        break;
                    case "BufferFull":
                        if (R = !1, w("complete"), Bb) {
                            var m = {type: "stepPlay", data: "findIFrame"};
                            l.postMessage(m), p.startRendering(), Bb = !1
                        }
                }
                break;
            case "setVideoTagMode":
                a.prototype.setLiveMode(c.data);
                break;
            case "playbackFlag":
                Ab.type = c.data === !0 ? "playback" : "live", null !== W && W.setPlaybackFlag(c.data);
                break;
            case "error":
                null !== A && A(c.data);
                break;
            case "MSEResolutionChanged":
                E(c.data);
                break;
            case "DecodeStart":
                var n = c.data.width - 0, o = c.data.height - 0;
                S.setAttribute("width", n), S.setAttribute("height", o), a.prototype.setLiveMode(c.data.decodeMode), B(c.data);
                break;
            case "ivsDraw":
                var q = c.data.ivsDraw, r = "canvas" === P ? J.timeStamp : eb;
                r = 1e3 * r.timestamp + r.timestamp_usec;
                var s = "canvas" === P ? r : r - parseInt(1e3 * W.getDuration());
                null !== G && (H.setCallback(G), H.draw(q, s, r));
                break;
            default:
                debug.log("workerManager::videoWorker unknown data = " + c.data)
        }
    }
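    // Message handler for the audio worker: (re)creates the audio player when the codec
    // changes (AudioPlayerAAC vs. AudioPlayerGxx) and buffers decoded audio for playback.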
    function e(a) {
        var b = a.data;
        switch (b.type) {
            case "render":
                if (V === !0) break;
                qb !== b.codec && (null !== q && (rb = q.getVolume(), sb = q.getInitVideoTimeStamp(), q.terminate()), "AAC" === b.codec ? "edge" === I || "firefox" === I ? (q = null, null !== A && A({errorCode: 201})) : q = new AudioPlayerAAC : (q = new AudioPlayerGxx, q.setSamplingRate(b.samplingRate)), null !== q && (q.setInitVideoTimeStamp(sb), q.audioInit(rb) || (q = null)), qb = b.codec), null !== q && (null === J || "undefined" == typeof J ? q.bufferAudio(b.data, b.rtpTimeStamp, null) : q.bufferAudio(b.data, b.rtpTimeStamp, J.codecType))
        }
    }
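    // Audio-talk plumbing: f handles messages from audioTalkWorker.js and passes received
    // talk data to the audioTalk callback; g forwards captured audio buffers from Talk to that worker.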
    function f(a) {
        var b = a.data;
        switch (b.type) {
            case "rtpData":
                v(b.data)
        }
    }
    function g(a) {
        var b = {type: "getRtpData", data: a};
        n.postMessage(b)
    }
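    // Resets the fMP4 batching state: closes the current VideoMediaSource and picks the
    // per-segment sample count (jb) — the browser default, or |ib| when speed play is active.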
    function h(a) {
        null !== W && (W.close(), W = null), jb = a === !1 ? hb : Math.abs(ib), Z.samples = new Array(jb), lb = !1, bb = 1, Y = null, $ = 0, ab = null, _ = 0
    }
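    // Draws one decoded frame on the canvas through StreamDrawer (MJPEG uses its own path).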
    function i(a) {
        null !== a && null !== p && ("mjpeg" === J.codecType ? p.drawMJPEG(a, J.width, J.height, J.codecType, J.frameType, J.timeStamp) : p.draw(a, J.width, J.height, J.codecType, J.frameType, J.timeStamp))
    }
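    // (Re)creates the VideoMediaSource used in <video>/MSE mode and wires its callbacks;
    // k relays buffering state to the audio player so audio stays in step with video.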
    function j() {
        lb = !0, null === W ? (W = VideoMediaSource(o), W.setCodecInfo(cb), W.setInitSegmentFunc(b), W.setVideoSizeCallback(c), W.setBeginDrawCallback(t), W.init(db), W.setSpeedPlay(ib)) : (W.getVideoElement(), W.setInitSegment()), W.setAudioStartCallback(k)
    }
    function k(a, b) {
        null !== q && q.setBufferingFlag(a, b)
    }
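    // Internal state. Notable names: l/m/n = video/audio/talk workers, p = StreamDrawer,
    // q = audio player, r = Talk, W = VideoMediaSource, I = browser name, J = last videoInfo,
    // Z holds the current fMP4 sample batch ($ = sample index, ab = accumulated frame bytes);
    // Fb maps encode_type codes to codec names, Gb maps audio sampling-rate indices to Hz.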
    var l = null, m = null, n = null, o = null, p = null, q = null, r = null, s = null, t = null, u = null, v = null,
        w = null, x = null, y = null, z = null, A = null, B = null, C = null, D = null, E = null, F = null, G = null,
        H = null, I = BrowserDetect(), J = null, K = null, L = 0, M = null, N = !1, O = !0, P = "", Q = !0, R = !1,
        S = null, T = null, U = null, V = !1, W = null, X = null, Y = null,
        Z = {id: 1, samples: null, baseMediaDecodeTime: 0}, $ = 0, _ = 0, ab = null, bb = 1, cb = "", db = null,
        eb = null, fb = 2, gb = 4, hb = "chrome" !== I ? gb : fb, ib = 1, jb = hb, kb = jb, lb = !1, mb = 0, nb = 0,
        ob = 0, pb = 1e3, qb = null, rb = 0, sb = 0, tb = 0, ub = 0, vb = 1e3, wb = null, xb = null, yb = null, zb = 0,
        Ab = {type: "live", codec: "", width: 0, height: 0, isLimitSpeed: null}, Bb = !1, Cb = null, Db = null,
        Eb = null, Fb = {5: "MJPEG", 8: "H264", 12: "H265"}, Gb = {
            1: 4e3,
            2: 8e3,
            3: 11025,
            4: 16e3,
            5: 2e4,
            6: 22050,
            7: 32e3,
            8: 44100,
            9: 48e3,
            10: 96e3,
            11: 128e3,
            12: 192e3,
            13: 64e3
        };
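    // Public API exposed on the instance returned at the end of this factory.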
    a.prototype = {
        init: function (a, b) {
            zb = 0, S = a, db = b;
            window.navigator.userAgent;
            l = new Worker("./static/videoWorker.js"), m = new Worker("./static/audioWorker.js"), l.onmessage = d, m.onmessage = e, p = new StreamDrawer(zb, this, S), H = IvsDraw(), p.setResizeCallback(s), xb = document.getElementById("count-fps"), wb = document.getElementById("span-fps")
        }, sendSdpInfo: function (a, b, c) {
            var d = {type: "sdpInfo", data: {sdpInfo: a, aacCodecInfo: b, decodeMode: P, govLength: M, checkDelay: Q}};
            if (N = c, l.postMessage(d), m.postMessage(d), N) try {
                window.AudioContext = window.AudioContext || window.webkitAudioContext || window.mozAudioContext || window.oAudioContext || window.msAudioContext, n = new Worker("./media/ump/Workers/audioTalkWorker.js"), n.onmessage = f, null === r && (r = new Talk, r.init(), r.setSendAudioTalkBufferCallback(g));
                var e = r.initAudioOut();
                n.postMessage(d), d = {type: "sampleRate", data: e}, n.postMessage(d)
            } catch (h) {
                return N = !1, void debug.error("Web Audio API is not supported in this web browser! : " + h)
            }
            qb = null, lb = !1, K = a
        }, parseRTPData: function (a, b) {
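            // a = RTSP interleave header, b = payload bytes. The nested parsers below walk the
            // vendor extension blocks (type byte plus length) in the payload header: c() extracts
            // video width/height/frame rate/encode_type, d() audio format and channel count, e()
            // only the high timestamp word; the result M is attached as L.info and posted to the workers.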
            function c() {
                for (var a = b[22] + 24, c = 24; a > c;) if (g == b[c]) {
                    if (c + 4 > a) return debug.log("i: " + c), -1;
                    M.width = b[c + 2] << 3, M.height = b[c + 3] << 3, c += 4
                } else if (h == b[c]) {
                    if (c + 4 > b.length) return debug.log("i: " + c), -1;
                    M.I_frame_interval = b[c + 1], M.encode_type = b[c + 2], M.frame_rate = b[c + 3], c += 4
                } else if (i == b[c]) M.width = (b[c + 5] << 8) + b[c + 4], M.height = (b[c + 7] << 8) + b[c + 6], c += 8; else if (j == b[c]) c += 4; else if (o == b[c]) c += 8; else if (k == b[c]) {
                    if (c + 4 > a) return debug.log("i: " + c), -1;
                    var d = (b[c + 2] << 8) + b[c + 3];
                    c += d
                } else if (x == b[c]) M.h264_svc_flag = !0, M.svc = b[c + 2], c += 4; else if (q == b[c]) c += 8; else if (u == b[c]) c += 8; else if (C == b[c]) {
                    var e = b[c + 1], f = b[c + 2];
                    c += 8, c += e * f * 16
                } else if (E == b[c]) c += 8; else if (G == b[c]) c += 8; else if (v == b[c]) c += 8; else if (w == b[c]) c += 8; else if (y == b[c]) c += 8; else if (I <= b[c] && b[c] < J) M.timeStampmsw = (b[c + 3] << 8) + b[c + 2], c += 4; else if (J <= b[c] && b[c] < K) c += b[c + 1]; else if (n == b[c]) c += 4; else if (p == b[c]) c += 4; else if (r == b[c]) c += 4; else if (t == b[c]) c += 8; else if (A == b[c]) {
                    var e = b[c + 1];
                    c += 8, c += 16 * e
                } else if (B == b[c]) c += 4; else {
                    if (H != b[c]) return debug.log("parseVideoInfo error ext_type:0x" + b[c]), debug.log("i: " + c), -1;
                    var a = (b[c + 5] << 8) + b[c + 4];
                    c += 8, c += a
                }
            }
            function d() {
                M.ChannelCount = 0;
                for (var a = b[22] + 24, c = 24; a > c;) if (g == b[c]) c += 4; else if (h == b[c]) c += 4; else if (i == b[c]) c += 8; else if (j == b[c]) c += 4; else if (s == b[c]) c += b[c + 1]; else if (o == b[c]) c += 8; else if (k == b[c]) {
                    var d = (b[c + 2] << 8) + b[c + 3]; // parenthesized as in the video parser: + binds tighter than <<, so the unparenthesized form skipped by the wrong length
                    c += d
                } else if (z == b[c]) M.ChannelCount = b[c + 1], M.channel = b[c + 2], c += 4; else if (y == b[c]) c += 8; else {
                    if (I != b[c]) return debug.log("parseAudioInfo error ext_type:0x" + b[c]), debug.log("i: " + c), -1;
                    M.timeStampmsw = (b[c + 3] << 8) + b[c + 2], c += 4
                }
                0 == M.ChannelCount && (M.ChannelCount = 1, M.channel = 0);
                for (var a = b[22] + 24, c = 24; a > c;) if (b[c] == g) c += 4; else if (b[c] == h) c += 4; else if (b[c] == i) c += 8; else if (b[c] == j) M.audio_type = b[c + 2], M.samplingRate = Gb[b[c + 3]], c += 4; else if (b[c] == s) c += b[c + 1]; else if (b[c] == o) c += 8; else if (b[c] == k) {
                    var d = (b[c + 2] << 8) + b[c + 3]; // same precedence fix as above
                    c += d
                } else if (b[c] == z) c += 4; else if (b[c] == y) c += 8; else {
                    if (I != b[c]) return debug.log("parseAudioInfo error ext_type:0x" + b[c]), debug.log("i: " + c), -1;
                    c += 4
                }
            }
            function e() {
                for (var a = b[22] + 24, c = 24; a > c;) if (I <= b[c] && b[c] < J) M.timeStampmsw = (b[c + 3] << 8) + b[c + 2], c += 4; else if (k == b[c]) {
                    if (c + 4 > a) return debug.log("i: " + c), -1;
                    debug.log("intelligent extension");
                    var d = (b[c + 2] << 8) + b[c + 3];
                    c += d
                } else c++
            }
            var f = b[4], g = 128, h = 129, i = 130, j = 131, k = 132, n = 133, o = 136, p = 137, q = 138, r = 139,
                s = 140, t = 144, u = 145, v = 146, w = 147, x = 148, y = 149, z = 150, A = 151, B = 152, C = 153,
                E = 154, G = 155, H = 156, I = 160, J = 176, K = 255,
                L = {type: "MediaData", data: {rtspInterleave: a, payload: b}, info: null}, M = {};
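            // b[4] selects the payload type: 251-254 are video frames, 240 is audio, 241 is parsed by e()
            // and forwarded to the video worker; a change in encode_type or audio format triggers the
            // FrameTypeChange / audioChange callback instead of forwarding that packet.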
            if (253 == f || 254 == f || 252 == f || 251 == f) {
                if (c(), null != Cb) {
                    if (Cb != M.encode_type) return Cb = M.encode_type, void D(Fb[M.encode_type])
                } else Cb = M.encode_type;
                switch (M.encode_type + "") {
                    case "2":
                    case "5":
                    case "8":
                    case "12":
                        l && (L.info = M, l.postMessage(L));
                        break;
                    default:
                        debug.log("encode_type: " + M.encode_type)
                }
            } else if (240 == f) {
                if (d(), null != Eb) {
                    if (Eb != M.audio_type) return Eb = M.audio_type, void F("audioType")
                } else Eb = M.audio_type;
                if (null != Db) {
                    if (Db != M.samplingRate) return Db = M.samplingRate, void F("samplingRate")
                } else Db = M.samplingRate;
                switch (M.audio_type + "") {
                    case "10":
                    case "14":
                    case "26":
                    case "27":
                    case "28":
                    case "29":
                    case "30":
                        m && (L.info = M, m.postMessage(L))
                }
            } else 241 == f ? (e(), l && (L.info = M, l.postMessage(L))) : debug.log("mediaType: " + f)
        }, setCallback: function (a, b) {
            switch (a) {
                case "timeStamp":
                    u = b;
                    break;
                case "ResolutionChanged":
                    s = b, null !== p && p.setResizeCallback(s);
                    break;
                case "audioTalk":
                    v = b;
                    break;
                case "stepRequest":
                    w = b;
                    break;
                case "metaEvent":
                    x = b;
                    break;
                case "videoMode":
                    y = b;
                    break;
                case "loadingBar":
                    z = b;
                    break;
                case "Error":
                    A = b;
                    break;
                case "PlayStart":
                    t = b, null !== p && p.setBeginDrawCallback(t);
                    break;
                case "DecodeStart":
                    B = b;
                    break;
                case "UpdateCanvas":
                    C = b, null !== p && p.setupdateCanvasCallback(C);
                    break;
                case "FrameTypeChange":
                    D = b;
                    break;
                case "MSEResolutionChanged":
                    E = b;
                    break;
                case "audioChange":
                    F = b;
                    break;
                case "WorkerReady":
                    yb = b;
                    break;
                case "IvsDraw":
                    G = b;
                    break;
                default:
                    debug.log(a), debug.log("workerManager::setCallback() : type is unknown")
            }
        }, capture: function (a) {
            "canvas" === P ? p.capture(a) : W.capture(a)
        }, setDeviceInfo: function (a) {
            U = a.mode
        }, setFPS: function (a) {
            var b = 30;
            L = 0 === a ? b : a, h(1 !== ib)
        }, setGovLength: function (a) {
            M = a
        }, setLiveMode: function (a) {
            if (a === 'canvas') S.style.display = 'block';
            else if (a === 'video') db.style.display = 'block';
            null !== y && y(a), P = null === a ? "canvas" : a, "video" === P ? null !== p && p.renewCanvas() : "canvas" === P && h(!1);
        }, controlAudio: function (a, b) {
            switch (debug.log(a + " " + b), a) {
                case "audioPlay":
                    "start" === b ? null !== q && q.play() : (rb = 0, null !== q && q.stop());
                    break;
                case "volumn":
                    rb = b, null !== q && q.controlVolumn(b);
                    break;
                case "audioSamplingRate":
                    null !== q && q.setSamplingRate(b)
            }
        }, controlAudioTalk: function (a, b) {
            if (null !== r) switch (a) {
                case "onOff":
                    "on" === b || r.stopAudioOut();
                    break;
                case "volumn":
                    r.controlVolumnOut(b)
            }
        }, reassignCanvas: function () {
            null !== p && p.reassignCanvas()
        }, digitalZoom: function (a) {
            null !== p && p.digitalZoom(a)
        }, playbackSpeed: function (a) {
            ib = a, p.setFrameInterval(ib)
        }, timeStamp: function () {
        }, initVideo: function (a) {
            h(a)
        }, setFpsFrame: function (a) {
            vb = a, ub = 0, tb = 0
        }, setCheckDelay: function (a) {
            Q = a
        }, initStartTime: function () {
            var a = {type: "initStartTime"};
            l.postMessage(a), p.stopRendering(), p.startRendering()
        }, terminate: function () {
            "backup" !== U && (l && (l.terminate(), l = null), m && (m.terminate(), m = null)), n && n.terminate(), r && (r.terminate(), r = null), p && p.terminate(), q && q.terminate(), W && W.terminate(), yb && (yb = null), p = null, O = !0
        }
    };
    return new a
};
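
A minimal usage sketch of the public API above; the element IDs, the sdpInfo/aacCodecInfo objects and the RTP feed are placeholders for whatever the surrounding page provides, not definitions from this file:

// Assumed setup: a <canvas id="video-canvas"> and <video id="video-tag"> in the page,
// plus an RTSP channel delivering interleaved RTP payloads.
var player = WorkerManager();
player.init(document.getElementById("video-canvas"), document.getElementById("video-tag"));
player.setCallback("Error", function (err) { debug.log(err); });
player.setCallback("PlayStart", function () { debug.log("first frame drawn"); });
player.sendSdpInfo(sdpInfo, aacCodecInfo, false); // third argument enables the audio-talk worker
// for every interleaved RTP packet read from the RTSP channel:
//   player.parseRTPData(rtspInterleave, payloadBytes);
// on teardown:
//   player.terminate();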