“I have started audio recording in an Angular web application, and I have successfully obtained the blob, but I am not able to convert this blob into a wav or mp3 file. Could you help me with this?”
“This is for an Angular 7 and TypeScript web application. I want to do it from the TypeScript file.”
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
import { Component, OnInit, ChangeDetectorRef } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Router, ActivatedRoute } from '@angular/router';
import { MediaMatcher, Breakpoints, BreakpointObserver } from '@angular/cdk/layout';
//--------------------------------------------------Audio Record
import { AudioRecordingService } from '../service/audio-recording.service';
import { DomSanitizer, SafeUrl } from '@angular/platform-browser';
//--------------------------------------------------Audio Record
@Component({
selector: 'app-commread',
templateUrl: './commread.component.html',
styleUrls: ['./commread.component.css']
})
export class CommreadComponent implements OnInit {
//--------------------------------------------------Audio Record
isRecording = false;
recordedTime: string;
blobUrl: SafeUrl;
//--------------------------------------------------Audio Record
constructor(
private http: HttpClient,
private router: Router,
private route: ActivatedRoute,
//--------------------------------------------------Audio Record
private audioRecordingService: AudioRecordingService,
private sanitizer: DomSanitizer,
//--------------------------------------------------Audio Record
) {
//--------------------------------------------------Audio Record
this.audioRecordingService.recordingFailed().subscribe(() => {
this.isRecording = false;
});
this.audioRecordingService.getRecordedTime().subscribe((time) => {
this.recordedTime = time;
});
this.audioRecordingService.getRecordedBlob().subscribe((data) => {
// data.blob is the raw WAV Blob produced by the service; keep a reference to it
// if you need to save or upload the recording later (see the sketch below the component).
this.blobUrl = this.sanitizer.bypassSecurityTrustUrl(URL.createObjectURL(data.blob));
});
//--------------------------------------------------Audio Record
}
ngOnInit(): void {
}
//--------------------------------------------------Audio Record
startRecording() {
if (!this.isRecording) {
this.isRecording = true;
this.audioRecordingService.startRecording();
}
}
abortRecording() {
if (this.isRecording) {
this.isRecording = false;
this.audioRecordingService.abortRecording();
}
}
stopRecording() {
if (this.isRecording) {
this.audioRecordingService.stopRecording();
this.isRecording = false;
}
}
clearRecordedData() {
// blobUrl is a sanitized object URL (SafeUrl), not the Blob itself, so size and
// type are undefined on it; the raw Blob is only available inside the
// getRecordedBlob() subscription above.
console.log(this.blobUrl);
this.blobUrl = null;
// The attempt below cannot work in the browser: 'fs' is a Node.js-only module,
// and readAsArrayBuffer() expects a Blob, not a SafeUrl. See the download
// sketch after this component for a browser-friendly alternative.
// var fileReader = new FileReader();
// fileReader.onload = function () {
//   var fs = require('fs');
//   fs.writeFileSync('test.wav', Buffer(new Uint8Array(this.result)));
// };
// fileReader.readAsArrayBuffer(this.blobUrl);
}
//--------------------------------------------------Audio Record
}
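For reference, one browser-side way to save that Blob as a .wav file is to create a temporary object URL and click a hidden anchor. Below is a minimal sketch under those assumptions; downloadWav is an illustrative helper that is not part of the code above, and it expects the raw Blob emitted by getRecordedBlob() (data.blob), not the sanitized blobUrl:

// Hypothetical helper (not in the original component): triggers a browser
// download of the recorded Blob as a .wav file.
function downloadWav(blob: Blob, fileName: string = 'recording.wav'): void {
  const url = URL.createObjectURL(blob);      // temporary object URL for the blob
  const anchor = document.createElement('a');
  anchor.href = url;
  anchor.download = fileName;                 // browser saves the data under this name
  document.body.appendChild(anchor);
  anchor.click();
  document.body.removeChild(anchor);
  URL.revokeObjectURL(url);                   // release the object URL afterwards
}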
<button class="start-button" mat-stroked-button *ngIf="!isRecording && !blobUrl" (click)="startRecording()">
<mat-icon>settings_voice</mat-icon>Start Recording
</button>
<button class="stop-button" mat-stroked-button *ngIf="isRecording && !blobUrl" (click)="stopRecording()">
<mat-icon>stop</mat-icon>Stop Recording
</button>
<br>
<button class="cancel-button" mat-stroked-button *ngIf="!isRecording && blobUrl"
(click)="clearRecordedData()">Clear Recording</button>
<div *ngIf="isRecording && !blobUrl"> {{recordedTime}} </div>
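In the template above, a plain download link bound to blobUrl would also let the user save the recording as a .wav file, since the data is already WAV-encoded. Something along these lines (illustrative, not part of the original template):

<a *ngIf="!isRecording && blobUrl" [href]="blobUrl" download="recording.wav">Download Recording</a>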
import { Injectable, NgZone } from '@angular/core';
import * as RecordRTC from 'recordrtc';
import * as moment from 'moment';
import { Observable, Subject } from 'rxjs';
interface RecordedAudioOutput {
blob: Blob;
title: string;
}
@Injectable()
export class AudioRecordingService {
private stream: MediaStream;
private recorder: any; // RecordRTC.StereoAudioRecorder instance
private interval: any; // timer id for the elapsed-time counter
private startTime: moment.Moment;
private _recorded = new Subject<RecordedAudioOutput>();
private _recordingTime = new Subject<string>();
private _recordingFailed = new Subject<string>();
getRecordedBlob(): Observable<RecordedAudioOutput> {
return this._recorded.asObservable();
}
getRecordedTime(): Observable<string> {
return this._recordingTime.asObservable();
}
recordingFailed(): Observable<string> {
return this._recordingFailed.asObservable();
}
startRecording() {
if (this.recorder) {
// It means recording is already started or it is already recording something
return;
}
this._recordingTime.next('00:00');
navigator.mediaDevices.getUserMedia({ audio: true }).then(s => {
this.stream = s;
this.record();
}).catch(error => {
this._recordingFailed.next();
});
}
abortRecording() {
this.stopMedia();
}
private record() {
// StereoAudioRecorder with mimeType 'audio/wav' produces a WAV-encoded Blob,
// so the recorded output is already a .wav file and needs no further conversion.
this.recorder = new RecordRTC.StereoAudioRecorder(this.stream, {
type: 'audio',
mimeType: 'audio/wav'
});
this.recorder.record();
this.startTime = moment();
this.interval = setInterval(
() => {
const currentTime = moment();
const diffTime = moment.duration(currentTime.diff(this.startTime));
const time = this.toString(diffTime.minutes()) + ':' + this.toString(diffTime.seconds());
this._recordingTime.next(time);
},
1000
);
}
private toString(value) {
let val = value;
if (!value) {
val = '00';
}
if (value < 10) {
val = '0' + value;
}
return val;
}
stopRecording() {
if (this.recorder) {
this.recorder.stop((blob) => {
if (this.startTime) {
// The blob delivered here already contains WAV data (see record() above), so
// give the file a .wav name; renaming it .mp3 does not convert the encoding.
const wavName = encodeURIComponent('audio_' + new Date().getTime() + '.wav');
this.stopMedia();
this._recorded.next({ blob: blob, title: wavName });
}
}, () => {
this.stopMedia();
this._recordingFailed.next();
});
}
}
private stopMedia() {
if (this.recorder) {
this.recorder = null;
clearInterval(this.interval);
this.startTime = null;
if (this.stream) {
this.stream.getAudioTracks().forEach(track => track.stop());
this.stream = null;
}
}
}
}
“I want to output the wav file in the console.”
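If the goal is only to inspect the wav data in the console, a sketch along these lines inside the component should be enough, because getRecordedBlob() already emits a WAV-encoded Blob and no conversion step is needed (the logging code here is illustrative, not from the original question):

// Inside the component: log the recorded WAV bytes by reading the Blob
// emitted by the service into an ArrayBuffer.
this.audioRecordingService.getRecordedBlob().subscribe((data) => {
  console.log('WAV blob:', data.blob, 'size:', data.blob.size, 'type:', data.blob.type);
  const reader = new FileReader();
  reader.onload = () => {
    // reader.result is an ArrayBuffer holding the complete WAV file bytes
    console.log(new Uint8Array(reader.result as ArrayBuffer));
  };
  reader.readAsArrayBuffer(data.blob);
});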