Commit
Revert "changed audio urls for ANTE type"
ishvindersethi22 authored May 2, 2024
1 parent 37765f8 commit 8e9c88d
Showing 6 changed files with 13 additions and 112 deletions.
@@ -84,7 +84,6 @@ const AllAudioTranscriptionLandingPage = () => {
const [advancedWaveformSettings, setAdvancedWaveformSettings] = useState(false);
const [assignedUsers, setAssignedUsers] = useState(null);
const [waveSurfer, setWaveSurfer] = useState(true);
- const [audioURL, setAudioURL] = useState("");

const handleCollapseClick = () => {
!showNotes && setShowStdTranscript(false);
@@ -119,27 +118,6 @@ const AllAudioTranscriptionLandingPage = () => {
}else{
setWaveSurfer(true);
}
- const fetchAudioData = await fetch(String(resp?.data?.audio_url).replace("https://asr-transcription.objectstore.e2enetworks.net/", `https://${configs.BASE_URL_AUTO}/task/get_audio_file/?audio_url=`), {
- method: "GET",
- headers: ProjectObj.getHeaders().headers
- })
- if (!fetchAudioData.ok){
- setAudioURL(resp?.data?.audio_url)
- }else{
- try {
- var base64data = await fetchAudioData.json();
- var binaryData = atob(base64data);
- var buffer = new ArrayBuffer(binaryData.length);
- var view = new Uint8Array(buffer);
- for (var i = 0; i < binaryData.length; i++) {
- view[i] = binaryData.charCodeAt(i);
- }
- var blob = new Blob([view], { type: 'audio/mpeg' });
- setAudioURL(URL.createObjectURL(blob));
- } catch {
- setAudioURL(resp?.data?.audio_url)
- }
- }
}
setLoading(false);
};
@@ -494,14 +472,11 @@ const AllAudioTranscriptionLandingPage = () => {
</Tooltip>
</Grid>
</Grid>
- {audioURL &&
<AudioPanel
setCurrentTime={setCurrentTime}
setPlaying={setPlaying}
taskData={taskData}
- audioUrl={audioURL}
/>
- }
<Grid container spacing={1} sx={{ pt: 1, pl: 2, pr : 3}} justifyContent="flex-end">
<Stack spacing={2} direction="row" sx={{ mb: 1 }} alignItems="center" justifyContent="flex-end" width="fit-content">
<Typography fontSize={14} fontWeight={"medium"} color="#555">
@@ -729,7 +704,7 @@ const AllAudioTranscriptionLandingPage = () => {
position="fixed"
bottom={1}
>
- {audioURL && (waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskData?.id} waveformSettings={waveformSettings} />)}
+ {waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskData?.id} waveformSettings={waveformSettings} />}
</Grid>
</>
);
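For reference, the code deleted in the hunks above proxied the object-store audio URL through the backend, decoded the base64 payload, and handed a Blob object URL to the player, falling back to the raw URL on any failure. A condensed, self-contained sketch of that removed pattern (the helper name and the headers/proxyBase parameters are illustrative, not identifiers from the codebase):

// Illustrative helper mirroring the removed logic: fetch the audio via the backend
// proxy, decode the base64 JSON payload into bytes, and return a playable Blob URL.
// Any failure falls back to the original object-store URL.
async function fetchAudioObjectURL(audioUrl, headers, proxyBase) {
  const proxied = String(audioUrl).replace(
    "https://asr-transcription.objectstore.e2enetworks.net/",
    `${proxyBase}/task/get_audio_file/?audio_url=`,
  );
  const response = await fetch(proxied, { method: "GET", headers });
  if (!response.ok) return audioUrl;                // proxy unavailable: use the raw URL
  try {
    const base64data = await response.json();       // endpoint returns a base64 string
    const binaryData = atob(base64data);            // decode base64 to a binary string
    const view = new Uint8Array(binaryData.length); // copy the bytes into a typed array
    for (let i = 0; i < binaryData.length; i++) {
      view[i] = binaryData.charCodeAt(i);
    }
    const blob = new Blob([view], { type: "audio/mpeg" });
    return URL.createObjectURL(blob);               // blob: URL usable as an <audio> src
  } catch {
    return audioUrl;                                // malformed payload: use the raw URL
  }
}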
5 changes: 2 additions & 3 deletions src/ui/pages/container/CL-Transcription/AudioPanel.jsx
@@ -15,8 +15,7 @@ import APITransport from "../../../../redux/actions/apitransport/apitransport";
const AudioPanel = memo( ({
setCurrentTime,
setPlaying,
- taskData,
- audioUrl
+ taskData
}) => {
const classes = AudioTranscriptionLandingStyle();
const dispatch = useDispatch();
@@ -68,7 +67,7 @@ const AudioPanel = memo( ({
id ="audio-panel"
controls
controlsList="nodownload"
- src={audioUrl}
+ src={TaskDetails?.data?.audio_url}
preload="metadata"
type="audio"
// style={{
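After the revert, AudioPanel no longer receives an audioUrl prop; the player reads the URL straight from the task details. A stripped-down sketch of that render path (TaskDetails is passed as a prop here purely for illustration; in the real component it is presumably selected from the Redux store):

// Minimal sketch of the reverted playback element: the <audio> tag streams the
// object-store URL directly, with no blob conversion in between.
const AudioPlayerSketch = ({ TaskDetails }) => (
  <audio
    id="audio-panel"
    controls
    controlsList="nodownload"
    preload="metadata"
    src={TaskDetails?.data?.audio_url}
  />
);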
@@ -112,7 +112,6 @@ const AudioTranscriptionLandingPage = () => {
const [autoSave, setAutoSave] = useState(true);
const [waveSurfer, setWaveSurfer] = useState(true);
const [autoSaveTrigger, setAutoSaveTrigger] = useState(false);
- const [audioURL, setAudioURL] = useState("");

// useEffect(() => {
// let intervalId;
@@ -294,27 +293,6 @@ const AudioTranscriptionLandingPage = () => {
}else{
setWaveSurfer(true);
}
- const fetchAudioData = await fetch(String(resp?.data?.audio_url).replace("https://asr-transcription.objectstore.e2enetworks.net/", `https://${configs.BASE_URL_AUTO}/task/get_audio_file/?audio_url=`), {
- method: "GET",
- headers: ProjectObj.getHeaders().headers
- })
- if (!fetchAudioData.ok){
- setAudioURL(resp?.data?.audio_url)
- }else{
- try {
- var base64data = await fetchAudioData.json();
- var binaryData = atob(base64data);
- var buffer = new ArrayBuffer(binaryData.length);
- var view = new Uint8Array(buffer);
- for (var i = 0; i < binaryData.length; i++) {
- view[i] = binaryData.charCodeAt(i);
- }
- var blob = new Blob([view], { type: 'audio/mpeg' });
- setAudioURL(URL.createObjectURL(blob));
- } catch {
- setAudioURL(resp?.data?.audio_url)
- }
- }
}
setLoading(false);
};
@@ -949,16 +927,14 @@ useEffect(() => {
filterMessage={filterMessage}
taskData={taskData}
/>
- {audioURL &&
<AudioPanel
setCurrentTime={setCurrentTime}
setPlaying={setPlaying}
handleAnnotationClick={handleAnnotationClick}
onNextAnnotation={onNextAnnotation}
AnnotationsTaskDetails={AnnotationsTaskDetails}
taskData={taskData}
- audioUrl={audioURL}
- />}
+ />
<Grid container spacing={1} sx={{ pt: 1, pl: 2, pr : 3}} justifyContent="flex-end">
<Stack spacing={2} direction="row" sx={{ mb: 1 }} alignItems="center" justifyContent="flex-end" width="fit-content">
<Typography fontSize={14} fontWeight={"medium"} color="#555">
@@ -1234,7 +1210,7 @@ useEffect(() => {
bottom={1}
// style={fullscreen ? { visibility: "hidden" } : {}}
>
- {audioURL && (waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskData?.id} waveformSettings={waveformSettings}/>)}
+ {waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskData?.id} waveformSettings={waveformSettings}/>}
</Grid>
</>
);
@@ -49,7 +49,6 @@ import ArrowRightIcon from "@mui/icons-material/ArrowRight";
import ArrowBackIcon from "@mui/icons-material/ArrowBack";
import getTaskAssignedUsers from '../../../../utils/getTaskAssignedUsers';
import LightTooltip from "../../component/common/Tooltip"
- import configs from '../../../../config/config';

const ReviewAudioTranscriptionLandingPage = () => {
const classes = AudioTranscriptionLandingStyle();
@@ -117,7 +116,6 @@ const ReviewAudioTranscriptionLandingPage = () => {
const [autoSave, setAutoSave] = useState(true);
const [waveSurfer, setWaveSurfer] = useState(true);
const [autoSaveTrigger, setAutoSaveTrigger] = useState(false);
- const [audioURL, setAudioURL] = useState("");

// useEffect(() => {
// let intervalId;
@@ -310,29 +308,7 @@ const ReviewAudioTranscriptionLandingPage = () => {
setWaveSurfer(false);
}else{
setWaveSurfer(true);
}
- const fetchAudioData = await fetch(String(resp?.data?.audio_url).replace("https://asr-transcription.objectstore.e2enetworks.net/", `https://${configs.BASE_URL_AUTO}/task/get_audio_file/?audio_url=`), {
- method: "GET",
- headers: ProjectObj.getHeaders().headers
- })
- if (!fetchAudioData.ok){
- setAudioURL(resp?.data?.audio_url)
- }else{
- try {
- var base64data = await fetchAudioData.json();
- var binaryData = atob(base64data);
- var buffer = new ArrayBuffer(binaryData.length);
- var view = new Uint8Array(buffer);
- for (var i = 0; i < binaryData.length; i++) {
- view[i] = binaryData.charCodeAt(i);
- }
- var blob = new Blob([view], { type: 'audio/mpeg' });
- setAudioURL(URL.createObjectURL(blob));
- } catch {
- setAudioURL(resp?.data?.audio_url)
- }
- }
- }
}}
setLoading(false);
};

@@ -1149,16 +1125,13 @@ useEffect(() => {
disableButton={disableButton}
anchorEl={anchorEl} setAnchorEl={setAnchorEl}
/>
- {audioURL &&
<AudioPanel
setCurrentTime={setCurrentTime}
setPlaying={setPlaying}
onNextAnnotation={onNextAnnotation}
AnnotationsTaskDetails={AnnotationsTaskDetails}
taskData={taskDetailList}
- audioUrl={audioURL}
/>
- }
<Grid container spacing={1} sx={{ pt: 1, pl: 2, pr : 3}} justifyContent="flex-end">
<Stack spacing={2} direction="row" sx={{ mb: 1 }} alignItems="center" justifyContent="flex-end" width="fit-content">
<Typography fontSize={14} fontWeight={"medium"} color="#555">
@@ -1459,7 +1432,7 @@ useEffect(() => {
bottom={1}
// style={fullscreen ? { visibility: "hidden" } : {}}
>
- {audioURL && (waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskDetailList} waveformSettings={waveformSettings}/>)}
+ {waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskDetailList} waveformSettings={waveformSettings}/>}
</Grid>
</>
);
@@ -114,7 +114,6 @@ const SuperCheckerAudioTranscriptionLandingPage = () => {
const [autoSave, setAutoSave] = useState(true);
const [waveSurfer, setWaveSurfer] = useState(false);
const [autoSaveTrigger, setAutoSaveTrigger] = useState(false);
- const [audioURL, setAudioURL] = useState("");

// useEffect(() => {
// let intervalId;
@@ -230,29 +229,7 @@ const SuperCheckerAudioTranscriptionLandingPage = () => {
setWaveSurfer(false);
}else{
setWaveSurfer(true);
}
- const fetchAudioData = await fetch(String(resp?.data?.audio_url).replace("https://asr-transcription.objectstore.e2enetworks.net/", `https://${configs.BASE_URL_AUTO}/task/get_audio_file/?audio_url=`), {
- method: "GET",
- headers: ProjectObj.getHeaders().headers
- })
- if (!fetchAudioData.ok){
- setAudioURL(resp?.data?.audio_url)
- }else{
- try {
- var base64data = await fetchAudioData.json();
- var binaryData = atob(base64data);
- var buffer = new ArrayBuffer(binaryData.length);
- var view = new Uint8Array(buffer);
- for (var i = 0; i < binaryData.length; i++) {
- view[i] = binaryData.charCodeAt(i);
- }
- var blob = new Blob([view], { type: 'audio/mpeg' });
- setAudioURL(URL.createObjectURL(blob));
- } catch {
- setAudioURL(resp?.data?.audio_url)
- }
- }
- }
}}
setLoading(false);
};

@@ -993,16 +970,14 @@ useEffect(() => {
anchorEl={anchorEl}
setAnchorEl={setAnchorEl}
/>
- {audioURL &&
<AudioPanel
setCurrentTime={setCurrentTime}
setPlaying={setPlaying}
// handleAnnotationClick={handleAnnotationClick}
onNextAnnotation={onNextAnnotation}
AnnotationsTaskDetails={AnnotationsTaskDetails}
taskData={taskDetailList}
- audioUrl={audioURL}
/>
- }
<Grid container spacing={1} sx={{ pt: 1, pl: 2, pr : 3}} justifyContent="flex-end">
<Stack spacing={2} direction="row" sx={{ mb: 1 }} alignItems="center" justifyContent="flex-end" width="fit-content">
<Typography fontSize={14} fontWeight={"medium"} color="#555">
@@ -1275,7 +1249,7 @@ useEffect(() => {
bottom={1}
// style={fullscreen ? { visibility: "hidden" } : {}}
>
- {audioURL && (waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskDetailList} waveformSettings={waveformSettings} />)}
+ {waveSurfer ? <Timeline2 key={taskDetails?.data?.audio_url} details={taskDetails} waveformSettings={waveSurferWaveformSettings}/> : <Timeline currentTime={currentTime} playing={playing} taskID={taskDetailList} waveformSettings={waveformSettings} />}
</Grid>
</>
);
4 changes: 4 additions & 0 deletions src/ui/pages/container/CL-Transcription/wavesurfer.jsx
@@ -84,6 +84,7 @@ const Timeline2 = ({ key, details, waveformSettings }) => {
barRadius: waveformSettings.barRadius,
barHeight: waveformSettings.barHeight,
mediaControls: true,
+ url: details?.data?.audio_url,
hideScrollbar: true,
autoCenter: true,
autoScroll: true,
@@ -114,6 +115,7 @@ const Timeline2 = ({ key, details, waveformSettings }) => {
miniMap.current = WaveSurfer.create({
container: document.querySelector('#minimap'),
height: '20',
+ url: details?.data?.audio_url,
hideScrollbar: true,
mediaControls: true,
media: document.querySelector('audio'),
@@ -138,6 +140,7 @@ const Timeline2 = ({ key, details, waveformSettings }) => {
barRadius: waveformSettings.barRadius,
barHeight: waveformSettings.barHeight,
mediaControls: true,
+ url: details?.data?.audio_url,
hideScrollbar: true,
autoCenter: true,
autoScroll: true,
@@ -187,6 +190,7 @@ const Timeline2 = ({ key, details, waveformSettings }) => {
miniMap.current = WaveSurfer.create({
container: document.querySelector('#minimap'),
height: '20',
+ url: details?.data?.audio_url,
mediaControls: true,
media: document.querySelector('audio'),
hideScrollbar: true,
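The only additions in wavesurfer.jsx are the url option on each WaveSurfer.create call, so the waveform instances load the remote audio themselves instead of relying on a pre-fetched blob URL. A minimal sketch of that configuration (wavesurfer.js v7-style options; the container selector and the subset of settings shown are illustrative):

import WaveSurfer from "wavesurfer.js";

// Create a waveform that streams the task's audio directly from its URL.
const createWaveform = (details, waveformSettings) =>
  WaveSurfer.create({
    container: "#waveform",           // placeholder selector
    url: details?.data?.audio_url,    // load the object-store URL directly
    mediaControls: true,
    hideScrollbar: true,
    autoCenter: true,
    autoScroll: true,
    barWidth: waveformSettings?.barWidth,
    barHeight: waveformSettings?.barHeight,
    barRadius: waveformSettings?.barRadius,
  });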
