index.html 6.2 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199
  1. <!DOCTYPE html>
  2. <head>
  3. <style>
  4. * {
  5. font-family: sans-serif;
  6. }
  7. pre {
  8. font-family: monospace;
  9. }
  10. a {
  11. font-family: sans-serif;
  12. }
  13. audio {
  14. width: 100%;
  15. }
  16. canvas {
  17. width: 100%;
  18. height: 0;
  19. transition: all linear 0.1s;
  20. }
  21. .canvas-active {
  22. height: 15vh;
  23. }
  24. pre {
  25. overflow: scroll;
  26. }
  27. </style>
  28. </head>
  29. <body>
  30. <h1>听歌识曲 Demo (Credit: <a href="https://github.com/mos9527/ncm-afp" target="_blank">https://github.com/mos9527/ncm-afp</a>)</h1>
  31. <hr>
  32. <p><b>DISCLAIMER: </b></p>
<p>This site uses the official NetEase audio matcher APIs (reverse engineered from <a
href="https://fn.music.163.com/g/chrome-extension-home-page-beta/">https://fn.music.163.com/g/chrome-extension-home-page-beta/</a>)
</p>
<p>And DOES NOT condone copyright infringement or intellectual property theft.</p>
  37. <hr>
  38. <p><b>NOTE:</b></p>
<p>Before you start using this site, you may want to visit this link first:</p>
  40. <a href="https://cors-anywhere.herokuapp.com/corsdemo">https://cors-anywhere.herokuapp.com/corsdemo</a>
<p>Since the NetEase APIs do not send CORS headers, this proxy is required to work around that restriction.</p>
  42. <hr>
<p>Usage:</p>
<ul>
<li>Select your audio file through the "Choose File" picker</li>
<li>Hit the "Clip" button and wait for the results!</li>
</ul>
  46. <audio id="audio" controls autoplay></audio>
  47. <canvas id="canvas"></canvas>
  48. <button id="invoke">Clip</button>
  49. <input type="file" name="picker" accept="*" id="file">
  50. <hr>
  51. <label for="use-mic">Mix in Microphone input</label>
  52. <input type="checkbox" name="use-mic" id="usemic">
  53. <hr>
  54. <pre id="logs"></pre>
  55. </body>
  56. <script src="./afp.wasm.js"></script>
  57. <script src="./afp.js"></script>
  58. <script type="module">
  59. const duration = 3
  60. let audioCtx, recorderNode, micSourceNode
  61. let audioBuffer, bufferHealth
  62. let audio = document.getElementById('audio')
  63. let file = document.getElementById('file')
  64. let clip = document.getElementById('invoke')
  65. let usemic = document.getElementById('usemic')
  66. let canvas = document.getElementById('canvas')
  67. let canvasCtx = canvas.getContext('2d')
  68. let logs = document.getElementById('logs')
  69. logs.write = line => logs.innerHTML += line + '\n'
  70. function RecorderCallback(channelL) {
  71. let sampleBuffer = new Float32Array(channelL.subarray(0, duration * 8000))
  72. GenerateFP(sampleBuffer).then(FP => {
  73. logs.write(`[index] Generated FP ${FP}`)
  74. logs.write('[index] Now querying, please wait...')
  75. fetch(
  76. '/audio/match?' +
  77. new URLSearchParams({
  78. duration: duration, audioFP: FP
  79. }), {
  80. method: 'POST'
  81. }).then(resp => resp.json()).then(resp => {
  82. if (!resp.data.result) {
  83. return logs.write('[index] Query failed with no results.')
  84. }
  85. logs.write(`[index] Query complete. Results=${resp.data.result.length}`)
  86. for (var song of resp.data.result) {
  87. logs.write(
  88. `[result] <a target="_blank" href="https://music.163.com/song?id=${song.song.id}">${song.song.name} - ${song.song.album.name} (${song.startTime / 1000}s)</a>`
  89. )
  90. }
  91. })
  92. })
  93. }
  94. function InitAudioCtx() {
  95. // AFP.wasm can't do it with anything other than 8KHz
  96. audioCtx = new AudioContext({ 'sampleRate': 8000 })
  97. if (audioCtx.state == 'suspended')
  98. return false
  99. let audioNode = audioCtx.createMediaElementSource(audio)
  100. audioCtx.audioWorklet.addModule('rec.js').then(() => {
  101. recorderNode = new AudioWorkletNode(audioCtx, 'timed-recorder')
  102. audioNode.connect(recorderNode) // recorderNode doesn't output anything
  103. audioNode.connect(audioCtx.destination)
  104. recorderNode.port.onmessage = event => {
  105. switch (event.data.message) {
  106. case 'finished':
  107. RecorderCallback(event.data.recording)
  108. clip.innerHTML = 'Clip'
  109. clip.disabled = false
  110. canvas.classList.remove('canvas-active')
  111. break
  112. case 'bufferhealth':
  113. clip.innerHTML = `${(duration * (1 - event.data.health)).toFixed(2)}s`
  114. bufferHealth = event.data.health
  115. audioBuffer = event.data.recording
  116. break
  117. default:
  118. logs.write(event.data.message)
  119. }
  120. }
  121. // Attempt to get user's microphone and connect it to the AudioContext.
  122. navigator.mediaDevices.getUserMedia({
  123. audio: {
  124. echoCancellation: false,
  125. autoGainControl: false,
  126. noiseSuppression: false,
  127. latency: 0,
  128. },
  129. }).then(micStream => {
  130. micSourceNode = audioCtx.createMediaStreamSource(micStream);
  131. micSourceNode.connect(recorderNode)
  132. usemic.checked = true
  133. logs.write('[rec.js] Microphone attached.')
  134. });
  135. });
  136. return true
  137. }
  138. clip.addEventListener('click', event => {
  139. recorderNode.port.postMessage({
  140. message: 'start', duration: duration
  141. })
  142. clip.disabled = true
  143. canvas.classList.add('canvas-active')
  144. })
  145. usemic.addEventListener('change', event => {
  146. if (!usemic.checked)
  147. micSourceNode.disconnect(recorderNode)
  148. else
  149. micSourceNode.connect(recorderNode)
  150. })
  151. file.addEventListener('change', event => {
  152. file.files[0].arrayBuffer().then(
  153. async buffer => {
  154. logs.write(`[index] File ${file.files[0].name} loaded.`)
  155. audio.src = window.URL.createObjectURL(new Blob([buffer]))
  156. clip.disabled = false
  157. })
  158. })
  159. function UpdateCanvas() {
  160. let w = canvas.clientWidth, h = canvas.clientHeight
  161. canvas.width = w, canvas.height = h
  162. canvasCtx.fillStyle = 'rgba(0,0,0,0)';
  163. canvasCtx.fillRect(0, 0, w, h);
  164. if (audioBuffer) {
  165. canvasCtx.fillStyle = 'black';
  166. for (var x = 0; x < w * bufferHealth; x++) {
  167. var y = audioBuffer[Math.ceil((x / w) * audioBuffer.length)]
  168. var z = Math.abs(y) * h / 2
  169. canvasCtx.fillRect(x, h / 2 - (y > 0 ? z : 0), 1, z)
  170. }
  171. }
  172. requestAnimationFrame(UpdateCanvas)
  173. }
  174. UpdateCanvas()
  175. let requestCtx = setInterval(() => {
  176. try {
  177. if (InitAudioCtx()) { // Put this here so we don't have to deal with the 'user did not interact' thing
  178. clearInterval(requestCtx)
  179. logs.write('[rec.js] Audio Context started.')
  180. }
  181. } catch {
  182. // Fail silently
  183. }
  184. }, 100)
  185. </script>