whatever, let's vibe coding
@@ -3,6 +3,8 @@
 import {spawn} from 'child_process';
 import {readdir} from 'fs/promises';
 import {createInterface} from 'node:readline';
+import {runInBackground} from '../lib/ai-generated.ts';
+import { extname } from 'node:path';


 function formatTime(seconds: number): string {
@@ -39,9 +41,10 @@ async function getDuration(inputFile: string): Promise<number> {
   });
 }


 async function compressVideo(inputFile: string, outputFile: string) {
   const duration = await getDuration(inputFile);
-  return new Promise<void>(resolve => {
+  return new Promise<void>((resolve, reject) => {
+    try {
     const ffmpeg = spawn('ffmpeg', [
       '-hwaccel', 'cuda',
@@ -93,10 +96,11 @@ async function compressVideo(inputFile: string, outputFile: string) {
       console.log(); // Newline at the end
       if (code === 0) {
         console.log(`✅ Completed: ${outputFile}`);
+        resolve();
       } else {
         console.error(`❌ Failed with code ${code}`);
+        reject();
       }
-      resolve();
     });

     ffmpeg.stderr.on('data', (data) => {
@@ -109,13 +113,17 @@ async function compressVideo(inputFile: string, outputFile: string) {
   });
 }

-const files = await readdir('.');
-const mp4Files = files.filter(file => file.match(/^VID_.*\.mp4$/));
-const filesCount = mp4Files.length;
+const files = (await readdir('.')).filter(name => {
+  return (!name.startsWith('HEVC') && ['mp4', 'mov'].includes(extname(name).toLowerCase().slice(1)))
+});
+let filesCount = files.length;

 for (let i = 0; i < filesCount; i++){
-  const file = mp4Files[i];
-  const outputFile = `HEVC_${file.slice(4)}`;
+  const file = files[i];
+
+  // const outputFile = `HEVC_${file.slice(4)}`;
+  const outputFile = `HEVC_${file}`;
   console.log(`\nProcessing: ${file} [${i + 1}/${filesCount}]`);
   await compressVideo(file, outputFile);
+  // runInBackground('identity', [file, outputFile])
 }
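
Taken together, the hunks above make two changes to the video-compression script: compressVideo's promise now rejects on a non-zero ffmpeg exit code instead of always resolving, and the file selection moves from the hard-coded VID_*.mp4 pattern to any .mp4/.mov file whose name does not already start with HEVC. A minimal sketch of how the reworked close handler fits together, assuming surrounding code this diff does not show (the input, output, and codec flags below are placeholders for illustration, not the script's actual arguments):

// Sketch only: behaviour of the promise wrapper after this change.
// The spawn arguments other than '-hwaccel cuda' are assumptions.
import { spawn } from 'child_process';

function compressVideoSketch(inputFile: string, outputFile: string): Promise<void> {
  return new Promise<void>((resolve, reject) => {
    const ffmpeg = spawn('ffmpeg', [
      '-hwaccel', 'cuda',
      '-i', inputFile,
      '-c:v', 'hevc_nvenc',   // assumed codec, given the HEVC_ output prefix
      outputFile,
    ]);
    ffmpeg.on('close', (code) => {
      if (code === 0) {
        resolve();                                            // success: fulfil the promise
      } else {
        reject(new Error(`ffmpeg exited with code ${code}`)); // failure now propagates to the caller
      }
    });
    ffmpeg.on('error', reject);                               // also reject if ffmpeg cannot be spawned
  });
}

Because the top-level loop still does await compressVideo(file, outputFile) without a surrounding catch in the visible hunks, a rejection now aborts the whole batch on the first failed file, where the old always-resolving version would have moved on to the next one.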
bin/gpt.ts (new executable file, +29 lines)
@@ -0,0 +1,29 @@
+#!/usr/bin/env node
+
+import { Ollama } from 'ollama';
+import {getStdin} from '../lib/ai-generated.ts';
+
+const ollama = new Ollama({ host: 'http://127.0.0.1:11434' });
+const model = 'gemma3:12b';
+// const model = 'codellama:13b';
+// const model = 'aya:8b';
+
+
+async function streamChat() {
+  let [,,message] = process.argv;
+  if (!message) return;
+  const file = getStdin();
+  const stream = await ollama.chat({
+    model,
+    messages: [
+      { role: 'user', content: `${message}\n${file}`},
+    ],
+    stream: true,
+  });
+
+  for await (const chunk of stream) {
+    process.stdout.write(chunk.message.content);
+  }
+}
+
+await streamChat();
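
bin/gpt.ts streams a chat completion from a local Ollama server, prepending whatever was piped on stdin to the prompt passed as the first CLI argument. getStdin is imported from ../lib/ai-generated.ts, which is not part of this commit; below is a plausible sketch of such a helper, purely an assumption about its shape. If the real helper is async like this sketch, the `const file = getStdin();` call in streamChat would also need an await, otherwise the promise object itself is interpolated into the prompt.

// Hypothetical stdin helper, NOT the one in lib/ai-generated.ts (that file is not in this diff).
// Collects piped input into a single string; returns '' when nothing is piped.
export async function getStdin(): Promise<string> {
  if (process.stdin.isTTY) return '';
  process.stdin.setEncoding('utf8');
  let data = '';
  for await (const chunk of process.stdin) {
    data += chunk;              // accumulate chunks until stdin closes
  }
  return data;
}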
bin/tutorial.ts (new file, +10 lines)
@@ -0,0 +1,10 @@
+function sortingAlgorithm(x: number[]): number[] {
+  for (let i = 0; i < x.length; i++) {
+    for (let j = 0; j < x.length - 1; j++) {
+      if (x[j] > x[j + 1]) {
+        [x[j], x[j + 1]] = [x[j + 1], x[j]];
+      }
+    }
+  }
+  return x;
+}
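
bin/tutorial.ts adds a classic in-place bubble sort: each inner pass swaps adjacent out-of-order elements, so after the i-th outer pass the i largest values have settled at the end, for O(n²) comparisons overall. A quick usage sketch with made-up input:

// Hypothetical example: the function mutates its argument and returns the same array reference.
const nums = [5, 1, 4, 2, 8];
console.log(sortingAlgorithm(nums)); // [ 1, 2, 4, 5, 8 ]
console.log(nums);                   // [ 1, 2, 4, 5, 8 ] as well, since the sort is in place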