Skip to content

Commit

Permalink
♻️ Move pitch detection into composable
Browse files Browse the repository at this point in the history
  • Loading branch information
superbuggy committed Nov 5, 2024
1 parent ff2598c commit fe2556c
Show file tree
Hide file tree
Showing 3 changed files with 61 additions and 49 deletions.
34 changes: 13 additions & 21 deletions src/components/FretBoard.vue
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,16 @@ import FretBoardControls from "./FretBoardControls.vue";
import PopOver from "./PopOver.vue";
import { computed, ref } from "vue";
import { remPixels, isOdd, range, mapValueToRange } from "../helpers";
import { objectMap } from "../helpers";
import { remPixels, isOdd, range, mapValueToRange, objectMap } from "../helpers";
import { useGuitar } from "../state/guitar";
import { useTemperament } from "../state/temperament";
import { usePitchDetection } from "../state/usePitchDetection";
import { useFretBoardControls } from "../state/fretboard-controls";
import { useTone } from "../effects/tone";
const { playNote } = useTone();
const { pitch, clarity } = usePitchDetection();

Check failure on line 15 in src/components/FretBoard.vue

View workflow job for this annotation

GitHub Actions / Build

All destructured elements are unused.
const {
pitchClassNames,
divisionsPerOctave,
Expand Down Expand Up @@ -75,17 +76,15 @@ const stringNotes = computed((): Record<string, Record<string, any>> => {
tuning.value,
(_, pitchName) => dict[pitchName].frequency
);
console.log(stringRootFrequencies);
const notesWithDistances = objectMap(stringRootFrequencies, (string, rootFrequency) =>
scaleNotesOnStrings.value[string].map(({ note, fretNumber }) => ({
note,
fretNumber,
noteY: stringY(rootFrequency, note.frequency),
}))
);
// console.log(notes);
// objectMap(tuning.values, ([, pitchName]) => notes.find((note) => note));
console.log(notesWithDistances);
return notesWithDistances;
});
Expand All @@ -102,7 +101,6 @@ const fretDistancesFromNut = (divisions = divisionsPerOctave.value) => {
.map((note) => stringY(lowestStringRootFrequency, note.frequency));
return twoOctaves;
//
};
const startingFret = 0;
Expand Down Expand Up @@ -136,27 +134,21 @@ function resetPopUp() {
popUpY.value = NaN;
}
const fretDots = computed(() =>
shouldShow12TETFrets.value
? [3, 5, 7, 9, 12, 15, 17, 19, 21, 24]
: {
12: [3, 5, 7, 9, 12, 15, 17, 19, 21, 24],
16: [3, 5, 7, 9, 11, 13, 16, 19, 21, 23, 25, 27, 29, 32],
17: [4, 7, 10, 13, 17, 21, 24, 27, 30, 34],
24: [6, 10, 14, 18, 24, 30, 34, 38, 42, 48],
}[divisionsPerOctave.value]
const fretDots = computed(() => shouldShow12TETFrets.value
? [3, 5, 7, 9, 12, 15, 17, 19, 21, 24]
: {
12: [3, 5, 7, 9, 12, 15, 17, 19, 21, 24],
16: [3, 5, 7, 9, 11, 13, 16, 19, 21, 23, 25, 27, 29, 32],
17: [4, 7, 10, 13, 17, 21, 24, 27, 30, 34],
24: [6, 10, 14, 18, 24, 30, 34, 38, 42, 48],
}[divisionsPerOctave.value]
);
// const fretDotSpacing = shouldShow12TETFrets.value ?
// shouldShow12TETFrets
// ? (fretSpacing[fretDot - 1] + fretSpacing[fretDot - 2]) / 2 + y
// : fretSpacing[fretDot - 1] + y
const hue = (degree: number, upperBound: number) => (360 * degree) / upperBound;
const hsl = (degree: number, upperBound: number, l = 75) =>
`hsl(${hue(degree, upperBound)}, 100%, ${l}%)`;
const hslForNote = (note: { pitchClassNumber: number }, l = 50) => {
// if (!note) return;
const degree = selectedScale.value.pitchClassNumbers
.map((pitchClassNumber) => pitchClassNumber % selectedScale.value.period)
.indexOf(note.pitchClassNumber);
Expand Down
43 changes: 15 additions & 28 deletions src/components/PitchDetector.vue
Original file line number Diff line number Diff line change
@@ -1,39 +1,17 @@
<script setup lang="ts">
import { PitchDetector } from "pitchy";
import { usePitchDetection } from "../state/usePitchDetection";
import { ref, onMounted } from "vue";
const pitch = ref<number | null>(null);
const clarity = ref<number | null>(null);
const { pitch, clarity, audioContext } = usePitchDetection();

Check failure on line 5 in src/components/PitchDetector.vue

View workflow job for this annotation

GitHub Actions / Build

Property 'audioContext' does not exist on type '{ pitch: Ref<number | null, number | null>; clarity: Ref<number | null, number | null>; }'.
const button = ref<HTMLElement | null>(null);
function updatePitch(analyserNode: AnalyserNode, detector: PitchDetector<Float32Array>, input: Float32Array, sampleRate: number) {
analyserNode.getFloatTimeDomainData(input);
const [_pitch, _clarity] = detector.findPitch(input, sampleRate);
// pitchDisplay.value.textContent = `${
pitch.value = Math.round(_pitch * 10) / 10
// } Hz`;
// document.getElementById("clarity").textContent = `${Math.round(
clarity.value= _clarity * 100;
// )} %`;
window.setTimeout(
() => updatePitch(analyserNode, detector, input, sampleRate),
100,
);
}
const audioInputs = ref<MediaDeviceInfo[] | null>(null);
const selectedAudioInput = ref<string | null>(null);
onMounted(() => {
const audioContext = new window.AudioContext();
const analyserNode = audioContext.createAnalyser();
button.value?.addEventListener("click", () => audioContext.resume());
navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
audioContext.createMediaStreamSource(stream).connect(analyserNode);
const detector = PitchDetector.forFloat32Array(analyserNode.fftSize);
detector.minVolumeDecibels = -30;
const input = new Float32Array(detector.inputLength);
updatePitch(analyserNode, detector, input, audioContext.sampleRate);
navigator.mediaDevices.enumerateDevices().then((devices) => {
audioInputs.value = devices.filter((device) => device.kind === "audioinput");
});
});
Expand All @@ -50,5 +28,14 @@ onMounted(() => {
<p>
<strong>Clarity:</strong> {{ clarity }} %
</p>
<select v-model="selectedAudioInput">
<option
v-for="input in audioInputs"
:key="input.deviceId"
:value="input.deviceId"
>
{{ input.label }}
</option>
</select>
</div>
</template>
33 changes: 33 additions & 0 deletions src/state/usePitchDetection.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
import { PitchDetector } from "pitchy";
import { ref, onMounted } from "vue";

// Shared AudioContext, created eagerly at module load. Browsers start it
// "suspended" until a user gesture; consumers can call resume() on it.
// NOTE(review): eager construction assumes this module is only imported in
// a browser environment — confirm no SSR usage.
export const audioContext = new window.AudioContext();

// Module-level refs: pitch/clarity state is shared by every consumer of the
// composable (both FretBoard.vue and PitchDetector.vue read the same values).
const pitch = ref<number | null>(null);
const clarity = ref<number | null>(null);

// Guard so that when several components call usePitchDetection(), only one
// analyser / media stream / update loop is created instead of one per caller.
let started = false;

/**
 * Composable wrapping microphone pitch detection via pitchy.
 *
 * On first consumer mount it requests mic access, wires the stream into an
 * AnalyserNode, and starts a self-rescheduling update loop that writes the
 * detected pitch (Hz, rounded to 0.1) and clarity (0–100 %) into shared refs.
 *
 * @returns `pitch` and `clarity` refs plus the shared `audioContext`, so
 *   consumers can call `audioContext.resume()` on a user gesture.
 */
export function usePitchDetection() {
  // One detection step: read the current time-domain window, update the
  // shared refs, then reschedule itself. Runs for the lifetime of the page.
  function updatePitch(
    analyserNode: AnalyserNode,
    detector: PitchDetector<Float32Array>,
    input: Float32Array,
    sampleRate: number,
  ) {
    analyserNode.getFloatTimeDomainData(input);
    const [rawPitch, rawClarity] = detector.findPitch(input, sampleRate);

    pitch.value = Math.round(rawPitch * 10) / 10; // round to 0.1 Hz
    clarity.value = rawClarity * 100; // pitchy reports 0–1; expose as percent
    // Re-sample every 100 ms.
    window.setTimeout(
      () => updatePitch(analyserNode, detector, input, sampleRate),
      100,
    );
  }

  onMounted(() => {
    if (started) return; // initialize once across all consumers
    started = true;
    const analyserNode = audioContext.createAnalyser();
    navigator.mediaDevices
      .getUserMedia({ audio: true })
      .then((stream) => {
        audioContext.createMediaStreamSource(stream).connect(analyserNode);
        const detector = PitchDetector.forFloat32Array(analyserNode.fftSize);
        detector.minVolumeDecibels = -30; // ignore quiet background noise
        const input = new Float32Array(detector.inputLength);
        updatePitch(analyserNode, detector, input, audioContext.sampleRate);
      })
      .catch((err: unknown) => {
        // Mic permission denied or no input device — leave the refs null
        // rather than surfacing an unhandled rejection.
        console.error("usePitchDetection: getUserMedia failed", err);
      });
  });

  // Expose audioContext so consumers (e.g. PitchDetector.vue, which
  // destructures it to resume on click) type-check — previously only
  // { pitch, clarity } was returned, causing the build failure shown in CI.
  return { pitch, clarity, audioContext };
}

0 comments on commit fe2556c

Please sign in to comment.