Audio Player: Fix queue order, memory leaks and reduce calculations (#1543)
commit 50865c78f5
parent 1a6f0bb28f
@@ -125,7 +125,6 @@ const Audio: FC<OwnProps> = ({
     makeTrackId(message),
     getMediaDuration(message)!,
     isVoice ? 'voice' : 'audio',
-    origin,
     mediaData,
     bufferingHandlers,
     undefined,
@@ -172,9 +171,10 @@ const Audio: FC<OwnProps> = ({
       onPlay(message.id, message.chatId);
     }

+    getDispatch().setAudioPlayerOrigin({ origin });
     setIsActivated(!isActivated);
     playPause();
-  }, [isPlaying, isUploading, message.id, message.chatId, onCancelUpload, onPlay, playPause, isActivated]);
+  }, [isUploading, isPlaying, isActivated, playPause, onCancelUpload, onPlay, message.id, message.chatId, origin]);

   useEffect(() => {
     if (onReadMedia && isMediaUnread && (isPlaying || isDownloading)) {
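This commit moves the playback origin out of per-track bookkeeping and component props and into `global.audioPlayer`: instead of threading an `origin` argument through `useAudioPlayer` and every `openAudioPlayer` call site, the Audio component records it once, when playback starts, via the new `setAudioPlayerOrigin` action. A minimal sketch of the resulting handler shape, simplified from the hunk above (`origin`, `playPause`, `setIsActivated` are the values the component already has):

```ts
// Sketch only — simplified from the Audio component's real click handler.
const handleButtonClick = useCallback(() => {
  // Record where playback was started from; the queue logic later reads this
  // from global state instead of from per-track fields.
  getDispatch().setAudioPlayerOrigin({ origin });
  setIsActivated(!isActivated);
  playPause();
}, [origin, isActivated, playPause]);
```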
@@ -77,7 +77,7 @@ const AudioResults: FC<OwnProps & StateProps & DispatchProps> = ({
   }, [focusMessage]);

   const handlePlayAudio = useCallback((messageId: number, chatId: string) => {
-    openAudioPlayer({ chatId, messageId, origin: AudioOrigin.Search });
+    openAudioPlayer({ chatId, messageId });
   }, [openAudioPlayer]);

   function renderList() {
@@ -3,7 +3,7 @@ import React, {
 } from '../../lib/teact/teact';
 import { getGlobal, withGlobal } from '../../lib/teact/teactn';

-import { AudioOrigin, LangCode } from '../../types';
+import { LangCode } from '../../types';
 import { GlobalActions } from '../../global/types';
 import { ApiMessage } from '../../api/types';

@@ -56,7 +56,6 @@ type StateProps = {
   hasNotifications: boolean;
   hasDialogs: boolean;
   audioMessage?: ApiMessage;
-  audioOrigin?: AudioOrigin;
   safeLinkModalUrl?: string;
   isHistoryCalendarOpen: boolean;
   shouldSkipHistoryAnimations?: boolean;
@@ -89,7 +88,6 @@ const Main: FC<StateProps & DispatchProps> = ({
   hasNotifications,
   hasDialogs,
   audioMessage,
-  audioOrigin,
   safeLinkModalUrl,
   isHistoryCalendarOpen,
   shouldSkipHistoryAnimations,
@@ -265,7 +263,7 @@ const Main: FC<StateProps & DispatchProps> = ({
       <ForwardPicker isOpen={isForwardModalOpen} />
       <Notifications isOpen={hasNotifications} />
       <Dialogs isOpen={hasDialogs} />
-      {audioMessage && <AudioPlayer key={audioMessage.id} message={audioMessage} origin={audioOrigin} noUi />}
+      {audioMessage && <AudioPlayer key={audioMessage.id} message={audioMessage} noUi />}
       <SafeLinkModal url={safeLinkModalUrl} />
       <HistoryCalendar isOpen={isHistoryCalendarOpen} />
       <StickerSetModal
@@ -302,7 +300,7 @@ function updatePageTitle(nextTitle: string) {
 export default memo(withGlobal(
   (global): StateProps => {
     const { settings: { byKey: { animationLevel, language, wasTimeFormatSetManually } } } = global;
-    const { chatId: audioChatId, messageId: audioMessageId, origin } = global.audioPlayer;
+    const { chatId: audioChatId, messageId: audioMessageId } = global.audioPlayer;
     const audioMessage = audioChatId && audioMessageId
       ? selectChatMessage(global, audioChatId, audioMessageId)
       : undefined;
@@ -316,7 +314,6 @@ export default memo(withGlobal(
       hasNotifications: Boolean(global.notifications.length),
       hasDialogs: Boolean(global.dialogs.length),
       audioMessage,
-      audioOrigin: origin,
       safeLinkModalUrl: global.safeLinkModalUrl,
       isHistoryCalendarOpen: Boolean(global.historyCalendarSelectedAt),
       shouldSkipHistoryAnimations: global.shouldSkipHistoryAnimations,
@@ -13,7 +13,7 @@ import { IS_IOS, IS_SINGLE_COLUMN_LAYOUT, IS_TOUCH_ENV } from '../../util/enviro

 import * as mediaLoader from '../../util/mediaLoader';
 import {
-  getMediaDuration, getMessageContent, getMessageMediaHash, getSenderTitle,
+  getMediaDuration, getMessageContent, getMessageMediaHash, getSenderTitle, isMessageLocal,
 } from '../../modules/helpers';
 import { selectChat, selectSender } from '../../modules/selectors';
 import { pick } from '../../util/iteratees';
@@ -59,7 +59,6 @@ const FAST_PLAYBACK_RATE = 1.8;

 const AudioPlayer: FC<OwnProps & StateProps & DispatchProps> = ({
   message,
-  origin = AudioOrigin.Inline,
   className,
   noUi,
   sender,
@@ -95,7 +94,6 @@ const AudioPlayer: FC<OwnProps & StateProps & DispatchProps> = ({
     makeTrackId(message),
     getMediaDuration(message)!,
     isVoice ? 'voice' : 'audio',
-    origin,
     mediaData,
     undefined,
     mediaMetadata,
@@ -103,7 +101,7 @@ const AudioPlayer: FC<OwnProps & StateProps & DispatchProps> = ({
     true,
     undefined,
     undefined,
-    true,
+    isMessageLocal(message),
     true,
   );

@@ -176,7 +174,7 @@ const AudioPlayer: FC<OwnProps & StateProps & DispatchProps> = ({
         color="translucent"
         size="smaller"
         className="player-button"
-        disabled={isFirst}
+        disabled={isFirst()}
         onClick={requestPreviousTrack}
         ariaLabel="Previous track"
       >
@@ -200,7 +198,7 @@ const AudioPlayer: FC<OwnProps & StateProps & DispatchProps> = ({
         color="translucent"
         size="smaller"
         className="player-button"
-        disabled={isLast}
+        disabled={isLast()}
         onClick={requestNextTrack}
         ariaLabel="Next track"
       >
@@ -2,7 +2,7 @@ import React, { useCallback } from '../../../../lib/teact/teact';
 import { getDispatch } from '../../../../lib/teact/teactn';

 import { isUserId } from '../../../../modules/helpers';
-import { AudioOrigin, IAlbum, MediaViewerOrigin } from '../../../../types';
+import { IAlbum, MediaViewerOrigin } from '../../../../types';
 import {
   ApiChat, ApiMessage, ApiUser, MAIN_THREAD_ID,
 } from '../../../../api/types';
@@ -81,7 +81,7 @@ export default function useInnerHandlers(
   }, [chatId, threadId, messageId, openMediaViewer, isScheduled]);

   const handleAudioPlay = useCallback((): void => {
-    openAudioPlayer({ chatId, messageId, origin: AudioOrigin.Inline });
+    openAudioPlayer({ chatId, messageId });
   }, [chatId, messageId, openAudioPlayer]);

   const handleAlbumMediaClick = useCallback((albumMessageId: number): void => {
@@ -210,7 +210,7 @@ const Profile: FC<OwnProps & StateProps & DispatchProps> = ({
   }, [profileId, openMediaViewer]);

   const handlePlayAudio = useCallback((messageId: number) => {
-    openAudioPlayer({ chatId: profileId, messageId, origin: AudioOrigin.SharedMedia });
+    openAudioPlayer({ chatId: profileId, messageId });
   }, [profileId, openAudioPlayer]);

   const handleMemberClick = useCallback((id: string) => {
@@ -530,7 +530,7 @@ export type ActionTypes = (
   // media viewer & audio player
   'openMediaViewer' | 'closeMediaViewer' |
   'openAudioPlayer' | 'setAudioPlayerVolume' | 'setAudioPlayerPlaybackRate' |
-  'setAudioPlayerMuted' | 'closeAudioPlayer' |
+  'setAudioPlayerMuted' | 'setAudioPlayerOrigin' | 'closeAudioPlayer' |
   // misc
   'openPollModal' | 'closePollModal' |
   'loadWebPagePreview' | 'clearWebPagePreview' | 'loadWallpapers' | 'uploadWallpaper' |
@@ -3,8 +3,6 @@ import {
 } from '../lib/teact/teact';
 import { getDispatch, getGlobal } from '../lib/teact/teactn';

-import { AudioOrigin } from '../types';
-
 import { register, Track, TrackId } from '../util/audioPlayer';
 import useEffectWithPrevDeps from './useEffectWithPrevDeps';
 import { isSafariPatchInProgress } from '../util/patchSafariProgressiveAudio';
@@ -21,7 +19,6 @@ export default (
   trackId: TrackId,
   originalDuration: number, // Sometimes incorrect for voice messages
   trackType: Track['type'],
-  origin: AudioOrigin,
   src?: string,
   handlers?: Record<string, Handler>,
   metadata?: MediaMetadata,
@@ -46,7 +43,7 @@ export default (
   }, [onTrackChange]);

   useOnChange(() => {
-    controllerRef.current = register(trackId, trackType, origin, (eventName, e) => {
+    controllerRef.current = register(trackId, trackType, (eventName, e) => {
       switch (eventName) {
         case 'onPlay': {
           const {
@@ -120,7 +117,6 @@ export default (
     proxy,
     destroy,
     setVolume,
-    setCurrentOrigin,
     stop,
     isFirst,
     isLast,
@@ -161,10 +157,9 @@ export default (

   const playIfPresent = useCallback(() => {
     if (src) {
-      setCurrentOrigin(origin);
       play(src);
     }
-  }, [src, origin, play, setCurrentOrigin]);
+  }, [src, play]);

   const playPause = useCallback(() => {
     if (isPlaying) {
@@ -194,8 +189,8 @@ export default (
     duration,
     requestNextTrack,
     requestPreviousTrack,
-    isFirst: isFirst(),
-    isLast: isLast(),
+    isFirst,
+    isLast,
     setPlaybackRate,
     toggleMuted,
   };
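Returning `isFirst` and `isLast` as functions (instead of eagerly computed booleans) is part of the "reduce calculations" fix: the queue scan now runs only when a consumer actually calls them, e.g. `disabled={isFirst()}` in the player buttons earlier in the diff. A small standalone sketch of the difference, using a hypothetical controller shape:

```ts
// Hypothetical, simplified controller shape for illustration only.
interface QueueController {
  isFirst: () => boolean; // was `isFirst: boolean`, recomputed on every hook update
  isLast: () => boolean;
}

function makeController(queue: string[], currentId: string): QueueController {
  return {
    // The indexOf scan is deferred until the caller needs the answer.
    isFirst: () => queue.indexOf(currentId) <= 0,
    isLast: () => queue.indexOf(currentId) === queue.length - 1,
  };
}
```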
@@ -179,8 +179,8 @@ addReducer('openAudioPlayer', (global, actions, payload) => {
       chatId,
       threadId,
       messageId,
-      origin,
-      volume: volume || global.audioPlayer.volume,
+      origin: origin ?? global.audioPlayer.origin,
+      volume: volume ?? global.audioPlayer.volume,
       playbackRate: playbackRate || global.audioPlayer.playbackRate,
       isMuted: isMuted || global.audioPlayer.isMuted,
     },
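The switch from `||` to `??` for `volume` (and the new `origin ?? global.audioPlayer.origin` fallback) matters because `0` is a valid volume: `||` discards any falsy value and keeps the previous one, while `??` only falls back on `null`/`undefined`. A minimal illustration:

```ts
// Minimal illustration of `||` vs `??` with a falsy-but-valid value.
const previousVolume = 0.5;
const requested: number | undefined = 0; // e.g. the user dragged the slider to zero

const withOr = requested || previousVolume;      // 0.5 — the explicit 0 is lost
const withNullish = requested ?? previousVolume; // 0   — only null/undefined fall back
```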
@@ -229,6 +229,20 @@ addReducer('setAudioPlayerMuted', (global, actions, payload) => {
   };
 });

+addReducer('setAudioPlayerOrigin', (global, actions, payload) => {
+  const {
+    origin,
+  } = payload!;
+
+  return {
+    ...global,
+    audioPlayer: {
+      ...global.audioPlayer,
+      origin,
+    },
+  };
+});
+
 addReducer('closeAudioPlayer', (global) => {
   return {
     ...global,
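For reference, the slice of global state this new reducer updates looks roughly like the following; only fields visible in the hunks above are shown, and the exact optionality and types are assumptions rather than the project's actual definition:

```ts
// Assumed shape, reconstructed from the reducers above — not the real type definition.
// AudioOrigin is the enum imported from '../types' elsewhere in this diff.
interface AudioPlayerState {
  chatId?: string;
  threadId?: number;
  messageId?: number;
  origin?: AudioOrigin;   // now persisted here and updated by setAudioPlayerOrigin
  volume: number;
  playbackRate: number;
  isMuted: boolean;
}
```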
@@ -1,6 +1,6 @@
-import { getDispatch } from '../lib/teact/teactn';
+import { getDispatch, getGlobal } from '../lib/teact/teactn';

-import { AudioOrigin } from '../types';
+import { AudioOrigin, GlobalSearchContent } from '../types';
 import { ApiMessage } from '../api/types';

 import { IS_SAFARI } from './environment';
@@ -8,6 +8,7 @@ import safePlay from './safePlay';
 import { patchSafariProgressiveAudio, isSafariPatchInProgress } from './patchSafariProgressiveAudio';
 import { getMessageKey, MessageKey, parseMessageKey } from '../modules/helpers';
 import { fastRaf } from './schedulers';
+import { selectCurrentMessageList } from '../modules/selectors';

 type Handler = (eventName: string, e: Event) => void;
 export type TrackId = `${MessageKey}-${number}`;
@@ -16,22 +17,20 @@ export interface Track {
   audio: HTMLAudioElement;
   proxy: HTMLAudioElement;
   type: 'voice' | 'audio';
-  origin: AudioOrigin;
   handlers: Handler[];
   onForcePlay?: NoneToVoidFunction;
   onTrackChange?: NoneToVoidFunction;
 }

-const tracks = new Map<string, Track>();
+const tracks = new Map<TrackId, Track>();
 let voiceQueue: TrackId[] = [];
 let musicQueue: TrackId[] = [];

-let currentTrackId: string | undefined;
+let currentTrackId: TrackId | undefined;

 function createAudio(
   trackId: TrackId,
   type: Track['type'],
-  origin: AudioOrigin,
   onForcePlay?: NoneToVoidFunction,
   onTrackChange?: NoneToVoidFunction,
 ): Track {
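Typing the registry as `Map<TrackId, Track>` and `currentTrackId` as `TrackId` lets the compiler enforce the key format described by the template-literal type `TrackId`. A standalone sketch of the idea — the exact shape of `MessageKey` below is an assumption, since its real definition lives in `../modules/helpers`:

```ts
// Standalone sketch; MessageKey's real definition is in ../modules/helpers.
type MessageKey = `msg${string}-${number}`;
type TrackId = `${MessageKey}-${number}`;

const tracks = new Map<TrackId, { type: 'voice' | 'audio' }>();

const goodId: TrackId = 'msg1234-42-0';   // OK: matches the template-literal pattern
// const badId: TrackId = 'some-string';  // rejected at compile time
tracks.set(goodId, { type: 'audio' });
```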
@@ -77,7 +76,6 @@ function createAudio(
     proxy: new Proxy(audio, {
       get: (target, key: keyof HTMLAudioElement) => target[key],
     }),
-    origin,
     handlers: [],
     onForcePlay,
     onTrackChange,
@@ -92,11 +90,9 @@ function playNext(trackId: TrackId, isReverseOrder?: boolean) {
     if (currentTrack.onTrackChange) currentTrack.onTrackChange();
   }

-  const track = tracks.get(trackId)!;
-  const queue = getTrackQueue(track);
-  if (!queue) return;
+  const origin = getGlobal().audioPlayer.origin || AudioOrigin.Inline;

-  const nextTrackId = findNextInQueue(queue, trackId, track.origin, isReverseOrder);
+  const nextTrackId = findNextInQueue(trackId, origin, isReverseOrder);
   if (!nextTrackId) {
     return;
   }
@@ -110,8 +106,6 @@ function playNext(trackId: TrackId, isReverseOrder?: boolean) {

   const nextTrack = tracks.get(nextTrackId)!;

-  if (currentTrack) nextTrack.origin = currentTrack.origin; // Preserve origin
-
   if (nextTrack.onForcePlay) {
     nextTrack.onForcePlay();
   }
@@ -133,13 +127,12 @@ export function stopCurrentAudio() {
 export function register(
   trackId: TrackId,
   trackType: Track['type'],
-  origin: AudioOrigin,
   handler: Handler,
   onForcePlay?: NoneToVoidFunction,
   onTrackChange?: NoneToVoidFunction,
 ) {
   if (!tracks.has(trackId)) {
-    const track = createAudio(trackId, trackType, origin, onForcePlay, onTrackChange);
+    const track = createAudio(trackId, trackType, onForcePlay, onTrackChange);
     tracks.set(trackId, track);
     addTrackToQueue(track, trackId);
   }
@@ -173,10 +166,7 @@ export function register(
       }

       safePlay(audio);
-    },
-
-    setCurrentOrigin(audioOrigin: AudioOrigin) {
-      tracks.get(trackId)!.origin = audioOrigin;
+      cleanUpQueue(trackType, trackId);
     },

     pause() {
@@ -236,17 +226,11 @@ export function register(
     },

     isLast() {
-      const track = tracks.get(trackId)!;
-      const queue = getTrackQueue(track);
-      if (!queue) return true;
-      return !findNextInQueue(queue, trackId, tracks.get(trackId)!.origin);
+      return !findNextInQueue(trackId, getGlobal().audioPlayer.origin);
     },

     isFirst() {
-      const track = tracks.get(trackId)!;
-      const queue = getTrackQueue(track);
-      if (!queue) return true;
-      return !findNextInQueue(queue, trackId, tracks.get(trackId)!.origin, true);
+      return !findNextInQueue(trackId, getGlobal().audioPlayer.origin, true);
     },

     requestPreviousTrack() {
@@ -278,14 +262,11 @@ export function register(
 }

 function getTrackQueue(track: Track) {
-  if (track.type === 'audio') {
-    return musicQueue;
+  switch (track.type) {
+    case 'audio': return musicQueue;
+    case 'voice': return voiceQueue;
+    default: return undefined;
   }
-
-  if (track.type === 'voice') {
-    return voiceQueue;
-  }
-  return undefined;
 }

 function addTrackToQueue(track: Track, trackId: TrackId) {
@@ -301,26 +282,48 @@ function addTrackToQueue(track: Track, trackId: TrackId) {
 }

 function removeFromQueue(track: Track, trackId: TrackId) {
+  const trackIdFilter = (el: TrackId) => el !== trackId;
   if (track.type === 'audio') {
-    musicQueue = musicQueue.filter((el) => el !== trackId);
+    musicQueue = musicQueue.filter(trackIdFilter);
   }

   if (track.type === 'voice') {
-    voiceQueue = voiceQueue.filter((el) => el !== trackId);
+    voiceQueue = voiceQueue.filter(trackIdFilter);
   }
 }

-function findNextInQueue(queue: TrackId[], current: TrackId, origin: AudioOrigin, isReverseOrder?: boolean) {
+function cleanUpQueue(type: Track['type'], trackId: TrackId) {
+  if (getGlobal().globalSearch.currentContent === GlobalSearchContent.Music) return;
+  const { chatId } = parseMessageKey(splitTrackId(trackId).messageKey);
+  const openedChatId = selectCurrentMessageList(getGlobal())?.chatId;
+  const queueFilter = (id: string) => (
+    id.startsWith(`msg${chatId}`) || (openedChatId && id.startsWith(`msg${openedChatId}`))
+  );
+
+  if (type === 'audio') {
+    musicQueue = musicQueue.filter(queueFilter);
+  }
+
+  if (type === 'voice') {
+    voiceQueue = voiceQueue.filter(queueFilter);
+  }
+}
+
+function findNextInQueue(currentId: TrackId, origin = AudioOrigin.Inline, isReverseOrder?: boolean) {
+  const track = tracks.get(currentId)!;
+  const queue = getTrackQueue(track);
+  if (!queue) return undefined;
+
   if (origin === AudioOrigin.Search) {
-    const index = queue.indexOf(current);
+    const index = queue.indexOf(currentId);
     if (index < 0) return undefined;
     const direction = isReverseOrder ? -1 : 1;
     return queue[index + direction];
   }

-  const { chatId } = parseMessageKey(splitTrackId(current).messageKey);
+  const { chatId } = parseMessageKey(splitTrackId(currentId).messageKey);
   const chatAudio = queue.filter((id) => id.startsWith(`msg${chatId}`));
-  const index = chatAudio.indexOf(current);
+  const index = chatAudio.indexOf(currentId);
   if (index < 0) return undefined;
   let direction = origin === AudioOrigin.Inline ? -1 : 1;
   if (isReverseOrder) direction *= -1;
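The memory-leak part of the fix is visible in this last hunk: the module-level `musicQueue`/`voiceQueue` arrays used to keep an entry for every registered track, and next/previous lookups relied on an `origin` field carried on each `Track`. Now `cleanUpQueue` prunes a queue down to tracks from the current track's chat and the currently opened chat (skipped while browsing global Music search results), and `findNextInQueue` derives the queue and origin on demand. A standalone sketch of the prefix-based pruning idea:

```ts
// Standalone sketch of the pruning idea; the real code filters the module-level
// queues in place and obtains chat ids via parseMessageKey/selectCurrentMessageList.
function pruneQueue(queue: string[], currentChatId: string, openedChatId?: string): string[] {
  return queue.filter((id) => (
    id.startsWith(`msg${currentChatId}`)
    || (openedChatId !== undefined && id.startsWith(`msg${openedChatId}`))
  ));
}

// pruneQueue(['msg1-10-0', 'msg2-11-0', 'msg3-12-0'], '1', '3')
// -> ['msg1-10-0', 'msg3-12-0']
```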