forked from diego3g/video-to-reels
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: convert.mjs
107 lines (83 loc) · 2.27 KB
/
convert.mjs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
#!/usr/bin/env zx
import { fold } from "./utils/fold-text.mjs";
import { calculateCuts } from "./utils/calculate-cuts.mjs";
// Source video path, from -i / --input.
const videoFile = argv["i"] || argv["input"];

// Handle --help BEFORE validating input, so `zx convert.mjs -h` prints the
// help text (instead of the missing-input error) and stops there.
if (argv["h"] || argv["help"]) {
  console.log(`
${chalk.bold("Video to Reels")}
${chalk.bold("Usage:")}
zx convert.mjs [options]
${chalk.bold("Options:")}
-h | --help - Show help message
-i | --input - Path of the video to be converted
`);
  process.exit(0);
}

if (!videoFile) {
  console.log(`
${chalk.bold("No input file specified")}
${chalk.bold("Usage:")}
${chalk.bold("zx convert.mjs -i video.mp4 [options]")}
`);
  // Missing required input is an error: exit non-zero so callers/CI notice.
  process.exit(1);
}
// Prompt for the question text that will later be rendered onto the overlay.
const q = await question("Qual a pergunta? \n");
/**
 * Shows the available .CUBE LUT filters and asks the user to pick one.
 *
 * The `ls` is run through zx's `$`, which (in its default verbose mode)
 * prints the directory listing to the terminal — that listing is what the
 * prompt text refers to ("acima" = above).
 *
 * @returns {Promise<{filter: string, listFilters: unknown}>} the chosen
 *   filter name (without the .CUBE extension) and the raw `ls` output.
 */
export async function QFilter() {
  const available = await $`ls ./assets/filters/`;

  const chosen = await question(
    "\nEscolha um dos filtros acima e digite o nome abaixo (sem a extensão .CUBE): \n"
  );

  return {
    filter: chosen,
    listFilters: available,
  };
}
// Ask which LUT filter to apply and what to name the rendered file.
const { filter } = await QFilter();
const video = await question("\nDigite o nome do arquivo a ser salvo: \n");
const videoOutput = "videos/";

// Word-wrap the question at 30 columns, trim each resulting line, and write
// the text to a temp file that ImageMagick reads via @tmp/text.txt.
const textArray = fold(q, 30, true);
const trimmedLines = [];
for (const line of textArray) {
  trimmedLines.push(line.trim());
}
await fs.writeFile("./tmp/text.txt", trimmedLines.join("\n"));
// Font size shrinks as the question spans more lines, so it still fits the
// overlay box. The lookup key is clamped to 1..5: anything longer than five
// lines reuses the smallest size, and an empty question would otherwise
// index fontSizes[0] and produce an `undefined` -pointsize downstream.
const fontSizes = {
  1: "48",
  2: "48",
  3: "48",
  4: "42",
  5: "36",
};
const fontSizeByAmountOfLines =
  fontSizes[Math.min(Math.max(textArray.length, 1), 5)];
// Render the wrapped question text (tmp/text.txt, via the @file syntax) onto
// the overlay template with ImageMagick: centered gravity, offset +10,+95
// from center, black Roboto at the size chosen above. Output tmp/question.png
// is the card composited over the video in the final ffmpeg pass.
await $`
convert \
-font assets/roboto.ttf \
-fill black \
-pointsize ${fontSizeByAmountOfLines} \
-gravity center \
-annotate +10+95 @tmp/text.txt \
assets/overlay.png \
tmp/question.png`;
// Extract a single frame from the input video, rotated 90° clockwise
// (transpose=1) to match the portrait output; the face detector reads
// tmp/frame.png to locate the subject.
await $`
ffmpeg -y \
-i ${videoFile} \
-vf "transpose=1" \
-frames:v 1 \
tmp/frame.png`;
await $`node detect-face/face-detection.js`;
// Use fs-extra's async readJson: the original `await fs.readJsonSync(...)`
// awaited a synchronous call — the await was a no-op and the read blocked
// the event loop.
const faceResult = await fs.readJson("./tmp/face.json");
const cut = calculateCuts(faceResult);
// Final render. Video chain: rotate to portrait (transpose=1), scale to
// 1080x1920, crop a 1080x1350 window whose vertical offset (cut.top) centers
// the detected face, composite the question card near the bottom, then apply
// the chosen .CUBE LUT. Audio chain: RNNoise denoising (bd.rnnn model,
// 90% mix) followed by EBU R128 loudness normalization.
// NOTE(review): h264_videotoolbox is the macOS hardware encoder — this
// command presumably only runs on macOS; confirm the target platform.
await $`
ffmpeg -y \
-i ${videoFile} \
-i tmp/question.png \
-c:v h264_videotoolbox \
-b:v 5000k \
-filter_complex "
transpose=1, \
scale=w=1080:h=1920, \
crop=1080:1350:0:${cut.top}, \
overlay=(main_w/2)-375:main_h-overlay_h-40, \
lut3d=assets/filters/${filter}.CUBE
" \
-af "
arnndn=m=assets/bd.rnnn:mix=0.9, \
loudnorm=I=-16:LRA=11:TP=-1.5 \
" \
"${videoOutput + video}.mp4"`;