-
Notifications
You must be signed in to change notification settings - Fork 0
/
utils.jl
136 lines (104 loc) · 3.31 KB
/
utils.jl
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
# Fix: also bind the module name `Flux` itself — `using Flux: onehotbatch, crossentropy`
# imports only those two names, so the qualified calls `Flux.batchseq` and
# `Flux.reset!` used later in this file would otherwise be undefined-name errors.
using Flux: Flux, onehotbatch, crossentropy
using ProgressMeter
using Random
using BSON

# Directory holding one preprocessed BSON file per utterance (keys: mfccs, labs).
const datadir = "sR_bson_subset"

# The 61 TIMIT phone labels; their position in this list is the class index.
const PHONES = split("h# q eh dx iy r ey ix tcl sh ow z s hh aw m t er l w aa hv ae dcl y axr d kcl k ux ng gcl g ao epi ih p ay v n f jh ax en oy dh pcl ah bcl el zh uw pau b uh th ax-h em ch nx eng")

# Phone label -> 1-based class index (typed Dict; was an untyped comprehension).
const phn2num = Dict{String,Int}(phone => i for (i, phone) in enumerate(PHONES))
phn2num["sil"] = 1  # generic "sil" maps to the same class as "h#" (silence, index 1)
"""
    loadData() -> (Xs, Ys)

Load every BSON file in `datadir`. For each utterance, `Xs` gets a vector of
per-frame MFCC feature vectors (rows of the stored matrix) and `Ys` gets the
matching vector of one-hot phone-label vectors.
"""
function loadData()
    Xs, Ys = Any[], Any[]  # element types only known after reading the files
    println("Loading data")
    @showprogress for fname in readdir(datadir)
        BSON.@load joinpath(datadir, fname) mfccs labs
        # Split the (nframes × nfeatures) matrix into one vector per frame.
        push!(Xs, [mfccs[i, :] for i in 1:size(mfccs, 1)])
        # Labels: phone strings -> class indices -> per-frame one-hot columns.
        nums = [phn2num[lab] for lab in vec(labs)]
        # Generalized: 1:length(PHONES) instead of the hard-coded collect(1:61).
        oh = onehotbatch(nums, 1:length(PHONES))
        push!(Ys, [oh[:, i] for i in 1:size(oh, 2)])
    end
    # map(identity, ·) keeps the original behavior of the `[x for x in Xs]`
    # copies: it narrows the Vector{Any} eltype for downstream type stability.
    return map(identity, Xs), map(identity, Ys)
end
"""
    makeBatches(d, batchSize)

Split the sequence collection `d` into consecutive batches of `batchSize`
sequences, padded to equal length with zero feature-vectors via `Flux.batchseq`.

Note: any trailing partial batch (`length(d) % batchSize` leftover items) is
silently dropped — same as the original loop bound.

NOTE(review): the qualified call `Flux.batchseq` needs the `Flux` module bound
in this file's scope; `using Flux: onehotbatch, crossentropy` alone does not
bind `Flux` — confirm how the module is brought in.
"""
function makeBatches(d, batchSize)
    batches = []  # eltype depends on Flux.batchseq's return type; left untyped
    for i in 1:div(length(d), batchSize)  # div == floor(Int, …): drops the remainder
        lo = (i - 1) * batchSize + 1
        hi = lo + batchSize - 1  # cannot exceed length(d) given the loop bound,
                                 # so the original `min(…, length(d))` was dead code
        chunk = d[lo:hi]
        # Pad with zero-vectors shaped like the first frame of the first sequence.
        push!(batches, Flux.batchseq(chunk, zeros(length(chunk[1][1]))))
    end
    return batches
end
"""
    shuffleData(Xs, Ys)

Return `(Xs, Ys)` reordered by one shared random permutation, so the pairing
of `Xs[i]` with `Ys[i]` is preserved.
"""
function shuffleData(Xs, Ys)
    # One permutation, applied to both collections in lockstep.
    perm = collect(eachindex(Xs))
    shuffle!(perm)
    return (Xs[perm], Ys[perm])
end
"""
    prepData(Xs, Ys)

Shuffle the dataset and return a vector of `(Xbatch, Ybatch)` pairs of size
`BATCH_SIZE`.

NOTE(review): `BATCH_SIZE` is not defined in this file — presumably a global
constant set elsewhere; confirm.
"""
function prepData(Xs, Ys)
    shufX, shufY = shuffleData(Xs, Ys)
    xBatches = makeBatches(shufX, BATCH_SIZE)
    yBatches = makeBatches(shufY, BATCH_SIZE)
    return collect(zip(xBatches, yBatches))
end
"""
    calculateCrossEntropy(wordName, model) -> Vector{Float64}

Load `wordName`'s MFCC frames and phone labels from `datadir`, reset the
model's recurrent state, and return the per-frame cross-entropy between the
model's predictions and the one-hot labels.
"""
function calculateCrossEntropy(wordName, model)
    BSON.@load "$(joinpath(datadir, wordName)).bson" mfccs labs
    frames = [mfccs[i, :] for i in 1:size(mfccs, 1)]
    nums = [phn2num[lab] for lab in vec(labs)]
    # Generalized: 1:length(PHONES) instead of the hard-coded collect(1:61).
    oh = onehotbatch(nums, 1:length(PHONES))
    targets = [oh[:, i] for i in 1:size(oh, 2)]
    Flux.reset!(model)  # clear recurrent state before the forward pass
    # Fix: the original wrapped each loss in `data.(…)`, but `data` (the old
    # Flux/Tracker accessor) is imported nowhere in this file and would raise
    # an undefined-name error; with tracker-free Flux the losses are plain
    # numbers, so it is simply dropped.
    return Float64.(crossentropy.(model.(frames), targets))
end
"""
    findBoundaries(wordName)

Return the 1-based frame indices at which the phone label sequence of
`wordName` changes, prefixed with a sentinel `0` (used as the plot origin).
"""
function findBoundaries(wordName)
    BSON.@load "$(joinpath(datadir, wordName)).bson" labs
    bounds = [0]          # leading 0 anchors the first segment
    prev = labs[1]
    for (frame, lab) in enumerate(labs)
        lab == prev && continue
        push!(bounds, frame)  # a new phone starts at this frame
        prev = lab
    end
    return bounds
end
"""
    transcribe(wordName)

Return the phone sequence of `wordName` with consecutive duplicate frame
labels collapsed to a single entry (run-length de-duplication).
"""
function transcribe(wordName)
    BSON.@load "$(joinpath(datadir, wordName)).bson" labs
    phonemes = [labs[1]]
    # Idiom: `last(phonemes)` replaces `phonemes[length(phonemes)]`, and a view
    # replaces the `labs[2:end]` slice, avoiding a copy of the label array.
    for lab in @view labs[2:end]
        lab == last(phonemes) || push!(phonemes, lab)
    end
    return phonemes
end
"""
    makeTicks(boundaries, labels) -> (boundaries, ticks)

Build x-axis tick labels for a boundary plot: each tick shows the boundary
frame number over the uppercased phone label. Display conventions: "sil" is
shown as "#", and "ao" is collapsed into "aa".
"""
function makeTicks(boundaries, labels)
    ticks = String[]  # typed element type; the original `[]` made a Vector{Any}
    for (b, l) in zip(boundaries, labels)
        if l == "sil"
            l = "#"     # silence rendered as '#'
        elseif l == "ao"
            l = "aa"    # merge 'ao' with 'aa' for display
        end
        push!(ticks, uppercase("$(b)\n$(l)"))
    end
    return (boundaries, ticks)
end
"""
    entropy(wordName, model) -> Vector{Float64}

Return the per-frame Shannon entropy of the model's predicted phone
distribution for `wordName`.
"""
function entropy(wordName, model)
    BSON.@load "$(joinpath(datadir, wordName)).bson" mfccs labs
    frames = [mfccs[i, :] for i in 1:size(mfccs, 1)]
    # Fix: reset BEFORE the forward pass (as calculateCrossEntropy does). The
    # original reset only afterwards, so recurrent state left over from a
    # previous utterance leaked into these predictions.
    Flux.reset!(model)
    yhat = model.(frames)
    # NOTE(review): y * log(y) is NaN when a predicted probability is exactly
    # zero; this assumes the model's output (softmax?) is strictly positive —
    # confirm.
    return Float64.([-sum(y .* log.(y)) for y in yhat])
end
"""
    entropyPlot(wordName, model; legendLocation=:bottomright)

Plot the model's per-frame prediction entropy for `wordName`, smoothed with a
LOESS fit, with vertical lines at the phone boundaries and frame/phone-labelled
x-ticks.

NOTE(review): `plot`, `vline!`, `loess`, and `predict` are not imported in this
file — presumably `Plots` and `Loess` are loaded by the caller; confirm.
"""
function entropyPlot(wordName, model; legendLocation=:bottomright)
    ent = entropy(wordName, model)
    t = Float64.(collect(1:length(ent)))  # frame numbers as Float64 for loess
    b = findBoundaries(wordName)
    ent = predict(loess(t, ent), t)  # replace raw entropy with the smoothed curve
    p = plot(t, ent, lab="Entropy", title=wordName, ylab="Entropy", xlab="Frame number", legend=legendLocation)
    ticks = makeTicks(b, transcribe(wordName))
    # vline! mutates the current plot (p) in place, adding boundary lines + ticks.
    p = vline!(b, lab="Boundaries", xticks=ticks)
    return p
end