Audio, Video Player: Better seeking, fix ordering, support covers and Media Session, redesign (#1306)

This commit is contained in:
Alexander Zinchuk 2021-09-06 15:29:05 +03:00
parent cb7253f5eb
commit 37f5d026bb
37 changed files with 1062 additions and 278 deletions

6
package-lock.json generated
View File

@ -4582,6 +4582,12 @@
"@types/jest": "*"
}
},
"@types/wicg-mediasession": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@types/wicg-mediasession/-/wicg-mediasession-1.1.2.tgz",
"integrity": "sha512-5UJ8tBtgMmIzyJafmfBjq2VcXk0SXqZnNCApepKiqLracDV4+dJdYPZCW0IwaYfOBkY54SmfCOWqMBmLCPrfuQ==",
"dev": true
},
"@types/yargs": {
"version": "16.0.4",
"resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.4.tgz",

View File

@ -49,6 +49,7 @@
"@types/react": "^17.0.17",
"@types/react-dom": "^17.0.9",
"@types/resize-observer-browser": "^0.1.6",
"@types/wicg-mediasession": "^1.1.2",
"@typescript-eslint/eslint-plugin": "^4.29.1",
"@typescript-eslint/parser": "^4.29.1",
"@webpack-cli/serve": "^1.5.1",

View File

@ -27,7 +27,7 @@ import { DELETED_COMMENTS_CHANNEL_ID, LOCAL_MESSAGE_ID_BASE, SERVICE_NOTIFICATIO
import { pick } from '../../../util/iteratees';
import { getApiChatIdFromMtpPeer } from './chats';
import { buildStickerFromDocument } from './symbols';
import { buildApiPhoto, buildApiThumbnailFromStripped } from './common';
import { buildApiPhoto, buildApiPhotoSize, buildApiThumbnailFromStripped } from './common';
import { interpolateArray } from '../../../util/waveform';
import { buildPeer } from '../gramjsBuilders';
import { addPhotoToLocalDb, resolveMessageApiChatId } from '../helpers';
@ -348,8 +348,13 @@ function buildAudio(media: GramJs.TypeMessageMedia): ApiAudio | undefined {
return undefined;
}
const thumbnailSizes = media.document.thumbs && media.document.thumbs
.filter((thumb): thumb is GramJs.PhotoSize => thumb instanceof GramJs.PhotoSize)
.map((thumb) => buildApiPhotoSize(thumb));
return {
fileName: getFilenameFromDocument(media.document, 'audio'),
thumbnailSizes,
...pick(media.document, ['size', 'mimeType']),
...pick(audioAttribute, ['duration', 'performer', 'title']),
};

View File

@ -68,6 +68,7 @@ export interface ApiAudio {
duration: number;
performer?: string;
title?: string;
thumbnailSizes?: ApiPhotoSize[];
}
export interface ApiVoice {

View File

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 240 240"><defs><linearGradient id="a" x1=".667" x2=".417" y1=".167" y2=".75"><stop offset="0" stop-color="#37aee2"/><stop offset="1" stop-color="#1e96c8"/></linearGradient><linearGradient id="b" x1=".66" x2=".851" y1=".437" y2=".802"><stop offset="0" stop-color="#eff7fc"/><stop offset="1" stop-color="#fff"/></linearGradient></defs><rect width="100%" height="100%" fill="url(#a)"/><path fill="#c8daea" d="M98 175c-3.888 0-3.227-1.468-4.568-5.17L82 132.207 170 80"/><path fill="#a9c9dd" d="M98 175c3 0 4.325-1.372 6-3l16-15.558-19.958-12.035"/><path fill="url(#b)" d="M100.04 144.41l48.36 35.729c5.519 3.045 9.501 1.468 10.876-5.123l19.685-92.763c2.015-8.08-3.08-11.746-8.36-9.349l-115.59 44.571c-7.89 3.165-7.843 7.567-1.438 9.528l29.663 9.259 68.673-43.325c3.242-1.966 6.218-.91 3.776 1.258"/></svg>

After

Width:  |  Height:  |  Size: 854 B

View File

@ -1,9 +1,15 @@
.Audio {
display: flex;
align-items: flex-start;
position: relative;
&.media-inner {
overflow: visible;
.media-loading {
position: absolute;
pointer-events: none;
&.interactive {
pointer-events: all;
}
}
&.own {
@ -70,21 +76,13 @@
}
}
.media-loading {
pointer-events: none;
.interactive {
pointer-events: auto;
}
}
.download-button {
position: absolute;
width: 1.125rem !important;
height: 1.125rem !important;
width: 1.3125rem !important;
height: 1.3125rem !important;
padding: 0;
left: 1.5rem;
top: 1.5rem;
left: 1.8rem;
top: 1.8rem;
border: .125rem solid var(--background-color);
z-index: 1;
@ -115,6 +113,7 @@
font-weight: 500;
margin: 0;
line-height: 1.25;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
@ -123,12 +122,30 @@
margin: .25rem 0 0;
font-size: .875rem;
color: var(--color-text-secondary);
font-variant-numeric: tabular-nums;
display: flex;
align-items: center;
span {
margin-left: 0.25rem;
font-size: 1.5rem;
line-height: .875rem;
vertical-align: middle;
.unread {
display: block;
position: relative;
margin-left: 0.5rem;
&::before {
content: "";
display: block;
position: absolute;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
width: 0.4rem;
height: 0.4rem;
border-radius: 50%;
background-color: var(--color-text-secondary);
}
}
}
@ -139,41 +156,69 @@
.waveform {
cursor: pointer;
margin-left: 1px;
touch-action: none;
}
.meta,
.performer,
.date {
font-size: .875rem;
line-height: 1;
color: var(--color-text-secondary);
margin: 0;
overflow: hidden;
white-space: nowrap;
text-overflow: ellipsis;
}
.duration {
margin: .1875rem 0 0;
font-size: .875rem;
color: var(--color-text-secondary);
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
flex-shrink: 0;
font-variant-numeric: tabular-nums;
}
.meta {
display: flex;
align-items: center;
margin-top: 0.125rem;
padding-inline-end: 0.5rem;
& > span {
min-width: 0;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.performer {
flex-shrink: 0;
}
.duration.with-seekline {
margin-inline-end: 0.625rem;
}
.bullet {
margin: 0 0.25rem;
flex-shrink: 0;
}
}
.seekline {
width: calc(100% - 2px);
padding-left: 6px;
margin-bottom: .3125rem;
height: 12px;
flex-grow: 1;
height: 1.25rem;
position: relative;
margin-left: -6px;
top: 1px;
top: 3px;
cursor: pointer;
touch-action: none;
&::before {
content: '';
position: absolute;
width: 100%;
left: 6px;
top: 6px;
height: 2px;
background-color: var(--color-interactive-inactive);
@ -188,7 +233,6 @@
overflow: hidden;
width: 100%;
top: 6px;
left: 6px;
i {
position: absolute;
@ -220,7 +264,7 @@
content: '';
position: absolute;
top: -6px;
right: -12px;
right: -6px;
width: 12px;
height: 12px;
border-radius: 6px;
@ -230,10 +274,6 @@
}
&.bigger {
.content {
margin-top: .1875rem;
}
.title {
white-space: nowrap;
overflow: hidden;
@ -242,17 +282,15 @@
line-height: 1.5rem;
}
.meta,
.meta {
height: 1.25rem;
}
.performer,
.date {
line-height: 1.0625rem;
}
.seekline {
top: 2px;
margin-bottom: .5rem;
}
.duration {
line-height: 1rem;
}
@ -276,6 +314,10 @@
}
&[dir=rtl] {
&:last-child {
margin-bottom: 0.625rem;
}
.toggle-play {
margin-left: .5rem;
margin-right: 0;
@ -286,6 +328,10 @@
}
}
.meta.duration.with-seekline {
margin-inline-start: 0.625rem;
}
.content,
.duration {
text-align: right;
@ -293,7 +339,11 @@
.download-button {
left: auto;
right: 1.5rem;
right: 2rem;
}
}
}
.has-replies .Audio[dir=rtl] {
margin-bottom: 1.625rem;
}

View File

@ -3,9 +3,9 @@ import React, {
} from '../../lib/teact/teact';
import {
ApiAudio, ApiMessage, ApiVoice,
ApiAudio, ApiMediaFormat, ApiMessage, ApiVoice,
} from '../../api/types';
import { ISettings } from '../../types';
import { AudioOrigin, ISettings } from '../../types';
import { IS_SINGLE_COLUMN_LAYOUT } from '../../util/environment';
import { formatMediaDateTime, formatMediaDuration, formatPastTimeShort } from '../../util/dateFormat';
@ -13,7 +13,6 @@ import {
getMediaDuration,
getMediaTransferState,
getMessageAudioCaption,
getMessageKey,
getMessageMediaFormat,
getMessageMediaHash,
isMessageLocal,
@ -22,6 +21,7 @@ import {
import { renderWaveformToDataUri } from './helpers/waveform';
import buildClassName from '../../util/buildClassName';
import renderText from './helpers/renderText';
import { getFileSizeString } from './helpers/documentInfo';
import { decodeWaveform, interpolateArray } from '../../util/waveform';
import useMediaWithDownloadProgress from '../../hooks/useMediaWithDownloadProgress';
import useShowTransition from '../../hooks/useShowTransition';
@ -29,6 +29,10 @@ import useBuffering from '../../hooks/useBuffering';
import useAudioPlayer from '../../hooks/useAudioPlayer';
import useMediaDownload from '../../hooks/useMediaDownload';
import useLang, { LangFn } from '../../hooks/useLang';
import { captureEvents } from '../../util/captureEvents';
import useMedia from '../../hooks/useMedia';
import { makeTrackId } from '../../util/audioPlayer';
import { getTranslation } from '../../util/langProvider';
import Button from '../ui/Button';
import ProgressSpinner from '../ui/ProgressSpinner';
@ -41,7 +45,7 @@ type OwnProps = {
message: ApiMessage;
senderTitle?: string;
uploadProgress?: number;
target?: 'searchResult' | 'sharedMedia';
origin: AudioOrigin;
date?: number;
lastSyncTime?: number;
className?: string;
@ -53,12 +57,6 @@ type OwnProps = {
onDateClick?: (messageId: number, chatId: number) => void;
};
interface ISeekMethods {
handleStartSeek: (e: React.MouseEvent<HTMLElement>) => void;
handleSeek: (e: React.MouseEvent<HTMLElement>) => void;
handleStopSeek: () => void;
}
const AVG_VOICE_DURATION = 30;
const MIN_SPIKES = IS_SINGLE_COLUMN_LAYOUT ? 20 : 25;
const MAX_SPIKES = IS_SINGLE_COLUMN_LAYOUT ? 50 : 75;
@ -70,7 +68,7 @@ const Audio: FC<OwnProps> = ({
message,
senderTitle,
uploadProgress,
target,
origin,
date,
lastSyncTime,
className,
@ -84,10 +82,16 @@ const Audio: FC<OwnProps> = ({
const { content: { audio, voice }, isMediaUnread } = message;
const isVoice = Boolean(voice);
const isSeeking = useRef<boolean>(false);
const playStateBeforeSeeking = useRef<boolean>(false);
// eslint-disable-next-line no-null/no-null
const seekerRef = useRef<HTMLElement>(null);
const lang = useLang();
const { isRtl } = lang;
const [isActivated, setIsActivated] = useState(false);
const shouldDownload = (isActivated || PRELOAD) && lastSyncTime;
const coverHash = getMessageMediaHash(message, 'pictogram');
const coverBlobUrl = useMedia(coverHash, false, ApiMediaFormat.BlobUrl);
const { mediaData, downloadProgress } = useMediaWithDownloadProgress(
getMessageMediaHash(message, 'inline'),
@ -95,28 +99,38 @@ const Audio: FC<OwnProps> = ({
getMessageMediaFormat(message, 'inline'),
);
function handleForcePlay() {
const handleForcePlay = useCallback(() => {
setIsActivated(true);
onPlay(message.id, message.chatId);
}
}, [message, onPlay]);
const handleTrackChange = useCallback(() => {
setIsActivated(false);
}, []);
const {
isBuffered, bufferedProgress, bufferingHandlers, checkBuffering,
} = useBuffering();
const {
isPlaying, playProgress, playPause, setCurrentTime, duration,
isPlaying, playProgress, playPause, play, pause, setCurrentTime, duration,
} = useAudioPlayer(
getMessageKey(message),
makeTrackId(message),
getMediaDuration(message)!,
isVoice ? 'voice' : 'audio',
origin,
mediaData,
bufferingHandlers,
undefined,
checkBuffering,
isActivated,
handleForcePlay,
handleTrackChange,
isMessageLocal(message),
);
const withSeekline = isPlaying || (playProgress > 0 && playProgress < 1);
useEffect(() => {
setIsActivated(isPlaying);
}, [isPlaying]);
@ -143,7 +157,7 @@ const Audio: FC<OwnProps> = ({
} = useShowTransition(isTransferring);
const handleButtonClick = useCallback(() => {
if (isUploading) {
if (isUploading && !isPlaying) {
if (onCancelUpload) {
onCancelUpload();
}
@ -165,29 +179,43 @@ const Audio: FC<OwnProps> = ({
}
}, [isPlaying, isMediaUnread, onReadMedia]);
const handleSeek = useCallback((e: React.MouseEvent<HTMLElement>) => {
if (isSeeking.current) {
const seekBar = e.currentTarget.closest('.seekline,.waveform');
if (seekBar) {
const { width, left } = seekBar.getBoundingClientRect();
setCurrentTime(duration * ((e.clientX - left) / width));
}
const handleSeek = useCallback((e: MouseEvent | TouchEvent) => {
if (isSeeking.current && seekerRef.current) {
const { width, left } = seekerRef.current.getBoundingClientRect();
const clientX = e instanceof MouseEvent ? e.clientX : e.targetTouches[0].clientX;
e.stopPropagation(); // Prevent Slide-to-Reply activation
// Prevent track skipping while seeking near end
setCurrentTime(Math.max(Math.min(duration * ((clientX - left) / width), duration - 0.1), 0.001));
}
}, [duration, setCurrentTime]);
const handleStartSeek = useCallback((e: React.MouseEvent<HTMLElement>) => {
const handleStartSeek = useCallback((e: MouseEvent | TouchEvent) => {
if (e instanceof MouseEvent && e.button === 2) return;
isSeeking.current = true;
playStateBeforeSeeking.current = isPlaying;
pause();
handleSeek(e);
}, [handleSeek]);
}, [handleSeek, pause, isPlaying]);
const handleStopSeek = useCallback(() => {
isSeeking.current = false;
}, []);
if (playStateBeforeSeeking.current) play();
}, [play]);
const handleDateClick = useCallback(() => {
onDateClick!(message.id, message.chatId);
}, [onDateClick, message.id, message.chatId]);
useEffect(() => {
if (!seekerRef.current || !withSeekline) return undefined;
return captureEvents(seekerRef.current, {
onCapture: handleStartSeek,
onRelease: handleStopSeek,
onClick: handleStopSeek,
onDrag: handleSeek,
});
}, [withSeekline, handleStartSeek, handleSeek, handleStopSeek]);
function getFirstLine() {
if (isVoice) {
return senderTitle || 'Voice';
@ -200,32 +228,37 @@ const Audio: FC<OwnProps> = ({
function getSecondLine() {
if (isVoice) {
return formatMediaDuration(voice!.duration);
return (
<div className="meta" dir={isRtl ? 'rtl' : undefined}>
{formatMediaDuration(voice!.duration)}
</div>
);
}
const { performer } = audio!;
return (
<>
{performer && renderText(performer)}
{performer && senderTitle && <span>&bull;</span>}
{senderTitle && renderText(senderTitle)}
</>
<div className="meta" dir={isRtl ? 'rtl' : undefined}>
{formatMediaDuration(duration)}
<span className="bullet">&bull;</span>
{performer && <span className="performer" title={performer}>{renderText(performer)}</span>}
{performer && senderTitle && <span className="bullet">&bull;</span>}
{senderTitle && <span title={senderTitle}>{renderText(senderTitle)}</span>}
</div>
);
}
const seekHandlers = { handleStartSeek, handleSeek, handleStopSeek };
const isOwn = isOwnMessage(message);
const renderedWaveform = useMemo(
() => voice && renderWaveform(voice, playProgress, isOwn, { handleStartSeek, handleSeek, handleStopSeek }, theme),
[voice, playProgress, isOwn, handleStartSeek, handleSeek, handleStopSeek, theme],
() => voice && renderWaveform(voice, playProgress, isOwn, theme, seekerRef),
[voice, playProgress, isOwn, theme],
);
const fullClassName = buildClassName(
'Audio media-inner',
'Audio',
className,
isOwn && !target && 'own',
target && 'bigger',
isOwn && origin === AudioOrigin.Inline && 'own',
(origin === AudioOrigin.Search || origin === AudioOrigin.SharedMedia) && 'bigger',
isSelected && 'audio-is-selected',
);
@ -238,15 +271,14 @@ const Audio: FC<OwnProps> = ({
buttonClassNames.push('play');
}
const showSeekline = isPlaying || (playProgress > 0 && playProgress < 1);
const contentClassName = buildClassName('content', showSeekline && 'with-seekline');
const contentClassName = buildClassName('content', withSeekline && 'with-seekline');
function renderSearchResult() {
return (
<>
<div className={contentClassName}>
<div className="content-row">
<p className="title" dir="auto">{renderText(getFirstLine())}</p>
<p className="title" dir="auto" title={getFirstLine()}>{renderText(getFirstLine())}</p>
<div className="message-date">
{date && (
@ -260,13 +292,15 @@ const Audio: FC<OwnProps> = ({
</div>
</div>
{showSeekline && renderSeekline(playProgress, bufferedProgress, seekHandlers)}
{!showSeekline && (
<p className="duration" dir="auto">
{playProgress > 0 ? `${formatMediaDuration(duration * playProgress)} / ` : undefined}
{getSecondLine()}
</p>
{withSeekline && (
<div className="meta search-result" dir={isRtl ? 'rtl' : undefined}>
<span className="duration with-seekline" dir="auto">
{playProgress < 1 && `${formatMediaDuration(duration * playProgress, duration)}`}
</span>
{renderSeekline(playProgress, bufferedProgress, seekerRef)}
</div>
)}
{!withSeekline && getSecondLine()}
</div>
</>
);
@ -282,11 +316,13 @@ const Audio: FC<OwnProps> = ({
<Button
round
ripple={!IS_SINGLE_COLUMN_LAYOUT}
size={target ? 'smaller' : 'tiny'}
size="smaller"
color={coverBlobUrl ? 'translucent-white' : 'primary'}
className={buttonClassNames.join(' ')}
ariaLabel={isPlaying ? 'Pause audio' : 'Play audio'}
onClick={handleButtonClick}
isRtl={lang.isRtl}
backgroundImage={coverBlobUrl}
>
<i className="icon-play" />
<i className="icon-pause" />
@ -296,13 +332,13 @@ const Audio: FC<OwnProps> = ({
<ProgressSpinner
progress={transferProgress}
transparent
size={target ? 'm' : 's'}
size="m"
onClick={isLoadingForPlaying ? handleButtonClick : undefined}
noCross={!isLoadingForPlaying}
/>
</div>
)}
{audio && (
{audio && !isUploading && (
<Button
round
size="tiny"
@ -313,12 +349,12 @@ const Audio: FC<OwnProps> = ({
<i className={isDownloadStarted ? 'icon-close' : 'icon-arrow-down'} />
</Button>
)}
{target === 'searchResult' && renderSearchResult()}
{target !== 'searchResult' && audio && renderAudio(
lang, audio, isPlaying, playProgress, bufferedProgress, seekHandlers, date,
onDateClick ? handleDateClick : undefined,
{origin === AudioOrigin.Search && renderSearchResult()}
{origin !== AudioOrigin.Search && audio && renderAudio(
lang, audio, duration, isPlaying, playProgress, bufferedProgress, seekerRef, (isDownloadStarted || isUploading),
date, transferProgress, onDateClick ? handleDateClick : undefined,
)}
{target !== 'searchResult' && voice && renderVoice(voice, renderedWaveform, isMediaUnread)}
{origin !== AudioOrigin.Search && voice && renderVoice(voice, renderedWaveform, playProgress, isMediaUnread)}
</div>
);
};
@ -326,50 +362,62 @@ const Audio: FC<OwnProps> = ({
function renderAudio(
lang: LangFn,
audio: ApiAudio,
duration: number,
isPlaying: boolean,
playProgress: number,
bufferedProgress: number,
seekHandlers: ISeekMethods,
seekerRef: React.Ref<HTMLElement>,
showProgress?: boolean,
date?: number,
progress?: number,
handleDateClick?: NoneToVoidFunction,
) {
const {
title, performer, duration, fileName,
title, performer, fileName,
} = audio;
const showSeekline = isPlaying || (playProgress > 0 && playProgress < 1);
const { isRtl } = getTranslation;
return (
<div className="content">
<p className="title" dir="auto">{renderText(title || fileName)}</p>
{showSeekline && renderSeekline(playProgress, bufferedProgress, seekHandlers)}
{!showSeekline && (
<div className="meta" dir="auto">
<span className="performer">{renderText(performer || 'Unknown')}</span>
<p className="title" dir="auto" title={title}>{renderText(title || fileName)}</p>
{showSeekline && (
<div className="meta" dir={isRtl ? 'rtl' : undefined}>
<span className="duration with-seekline" dir="auto">
{formatMediaDuration(duration * playProgress, duration)}
</span>
{renderSeekline(playProgress, bufferedProgress, seekerRef)}
</div>
)}
{!showSeekline && showProgress && (
<div className="meta" dir={isRtl ? 'rtl' : undefined}>
{progress ? `${getFileSizeString(audio!.size * progress)} / ` : undefined}{getFileSizeString(audio!.size)}
</div>
)}
{!showSeekline && !showProgress && (
<div className="meta" dir={isRtl ? 'rtl' : undefined}>
<span className="duration" dir="auto">{formatMediaDuration(duration)}</span>
<span className="bullet">&bull;</span>
<span className="performer" dir="auto" title={performer}>{renderText(performer || 'Unknown')}</span>
{date && (
<>
{' '}
&bull;
{' '}
<span className="bullet">&bull;</span>
<Link className="date" onClick={handleDateClick}>{formatMediaDateTime(lang, date * 1000)}</Link>
</>
)}
</div>
)}
<p className="duration" dir="auto">
{playProgress > 0 ? `${formatMediaDuration(duration * playProgress)} / ` : undefined}
{formatMediaDuration(duration)}
</p>
</div>
);
}
function renderVoice(voice: ApiVoice, renderedWaveform: any, isMediaUnread?: boolean) {
function renderVoice(voice: ApiVoice, renderedWaveform: any, playProgress: number, isMediaUnread?: boolean) {
return (
<div className="content">
{renderedWaveform}
<p className="voice-duration" dir="auto">
{formatMediaDuration(voice.duration)}
{isMediaUnread && <span>&bull;</span>}
{playProgress === 0 ? formatMediaDuration(voice.duration) : formatMediaDuration(voice.duration * playProgress)}
{isMediaUnread && <span className="unread" />}
</p>
</div>
);
@ -379,8 +427,8 @@ function renderWaveform(
voice: ApiVoice,
playProgress = 0,
isOwn = false,
{ handleStartSeek, handleSeek, handleStopSeek }: ISeekMethods,
theme: ISettings['theme'],
seekerRef: React.Ref<HTMLElement>,
) {
const { waveform, duration } = voice;
@ -411,9 +459,7 @@ function renderWaveform(
height={height}
className="waveform"
draggable={false}
onMouseDown={handleStartSeek}
onMouseMove={handleSeek}
onMouseUp={handleStopSeek}
ref={seekerRef as React.Ref<HTMLImageElement>}
/>
);
}
@ -421,14 +467,12 @@ function renderWaveform(
function renderSeekline(
playProgress: number,
bufferedProgress: number,
{ handleStartSeek, handleSeek, handleStopSeek }: ISeekMethods,
seekerRef: React.Ref<HTMLElement>,
) {
return (
<div
className="seekline no-selection"
onMouseDown={handleStartSeek}
onMouseMove={handleSeek}
onMouseUp={handleStopSeek}
ref={seekerRef as React.Ref<HTMLDivElement>}
>
<span className="seekline-buffered-progress">
<i

View File

@ -4,7 +4,7 @@ import React, {
import { withGlobal } from '../../../lib/teact/teactn';
import { GlobalActions } from '../../../global/types';
import { LoadMoreDirection } from '../../../types';
import { AudioOrigin, LoadMoreDirection } from '../../../types';
import { SLIDE_TRANSITION_DURATION } from '../../../config';
import { MEMO_EMPTY_ARRAY } from '../../../util/memo';
@ -76,7 +76,7 @@ const AudioResults: FC<OwnProps & StateProps & DispatchProps> = ({
}, [focusMessage]);
const handlePlayAudio = useCallback((messageId: number, chatId: number) => {
openAudioPlayer({ chatId, messageId });
openAudioPlayer({ chatId, messageId, origin: AudioOrigin.Search });
}, [openAudioPlayer]);
function renderList() {
@ -97,7 +97,7 @@ const AudioResults: FC<OwnProps & StateProps & DispatchProps> = ({
key={message.id}
theme={theme}
message={message}
target="searchResult"
origin={AudioOrigin.Search}
senderTitle={getSenderName(lang, message, chatsById, usersById)}
date={message.date}
lastSyncTime={lastSyncTime}

View File

@ -96,10 +96,6 @@
}
.Audio {
.duration span {
padding: 0 .25rem;
}
.ProgressSpinner {
margin: -.1875rem 0 0 -.1875rem;
}

View File

@ -3,6 +3,7 @@ import React, {
} from '../../lib/teact/teact';
import { getGlobal, withGlobal } from '../../lib/teact/teactn';
import { AudioOrigin } from '../../types';
import { GlobalActions } from '../../global/types';
import { ApiMessage } from '../../api/types';
import { LangCode } from '../../types';
@ -55,6 +56,7 @@ type StateProps = {
hasNotifications: boolean;
hasDialogs: boolean;
audioMessage?: ApiMessage;
audioOrigin?: AudioOrigin;
safeLinkModalUrl?: string;
isHistoryCalendarOpen: boolean;
shouldSkipHistoryAnimations?: boolean;
@ -84,6 +86,7 @@ const Main: FC<StateProps & DispatchProps> = ({
hasNotifications,
hasDialogs,
audioMessage,
audioOrigin,
safeLinkModalUrl,
isHistoryCalendarOpen,
shouldSkipHistoryAnimations,
@ -242,7 +245,7 @@ const Main: FC<StateProps & DispatchProps> = ({
<ForwardPicker isOpen={isForwardModalOpen} />
<Notifications isOpen={hasNotifications} />
<Dialogs isOpen={hasDialogs} />
{audioMessage && <AudioPlayer key={audioMessage.id} message={audioMessage} noUi />}
{audioMessage && <AudioPlayer key={audioMessage.id} message={audioMessage} origin={audioOrigin} noUi />}
<SafeLinkModal url={safeLinkModalUrl} />
<HistoryCalendar isOpen={isHistoryCalendarOpen} />
<StickerSetModal
@ -277,7 +280,7 @@ function updatePageTitle(nextTitle: string) {
export default memo(withGlobal(
(global): StateProps => {
const { chatId: audioChatId, messageId: audioMessageId } = global.audioPlayer;
const { chatId: audioChatId, messageId: audioMessageId, origin } = global.audioPlayer;
const audioMessage = audioChatId && audioMessageId
? selectChatMessage(global, audioChatId, audioMessageId)
: undefined;
@ -292,6 +295,7 @@ export default memo(withGlobal(
hasNotifications: Boolean(global.notifications.length),
hasDialogs: Boolean(global.dialogs.length),
audioMessage,
audioOrigin: origin,
safeLinkModalUrl: global.safeLinkModalUrl,
isHistoryCalendarOpen: Boolean(global.historyCalendarSelectedAt),
shouldSkipHistoryAnimations: global.shouldSkipHistoryAnimations,

View File

@ -118,10 +118,8 @@ const VideoPlayer: FC<OwnProps> = ({
}
}, [exitFullscreen, isFullscreen, setFullscreen]);
const handleSeek = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
e.stopPropagation();
videoRef.current!.currentTime = (Number(e.target.value) * videoRef.current!.duration) / 100;
const handleSeek = useCallback((position: number) => {
videoRef.current!.currentTime = position;
}, []);
const toggleControls = useCallback((e: React.MouseEvent<HTMLDivElement>) => {

View File

@ -101,7 +101,8 @@
right: 1rem;
top: 0;
height: 1rem;
touch-action: none;
cursor: pointer;
&-track {
position: absolute;
@ -143,18 +144,5 @@
transform: translate(.325rem, -50%);
}
}
&-input {
width: 100%;
opacity: 0;
margin: 0;
padding: 0;
cursor: pointer;
overflow: hidden;
&::-webkit-slider-thumb {
margin-top: -2rem;
}
}
}
}

View File

@ -1,9 +1,12 @@
import React, { FC, useState, useEffect } from '../../lib/teact/teact';
import React, {
FC, useState, useEffect, useRef, useCallback,
} from '../../lib/teact/teact';
import { IS_SINGLE_COLUMN_LAYOUT } from '../../util/environment';
import { formatMediaDuration } from '../../util/dateFormat';
import formatFileSize from './helpers/formatFileSize';
import useLang from '../../hooks/useLang';
import { captureEvents } from '../../util/captureEvents';
import Button from '../ui/Button';
@ -21,11 +24,9 @@ type IProps = {
isFullscreen: boolean;
onChangeFullscreen: (e: React.MouseEvent<HTMLButtonElement, MouseEvent>) => void;
onPlayPause: (e: React.MouseEvent<HTMLButtonElement, MouseEvent>) => void;
onSeek: OnChangeHandler;
onSeek: (position: number) => void;
};
type OnChangeHandler = (e: React.ChangeEvent<HTMLInputElement>) => void;
const stopEvent = (e: React.MouseEvent<HTMLElement>) => {
e.stopPropagation();
};
@ -47,6 +48,9 @@ const VideoPlayerControls: FC<IProps> = ({
onSeek,
}) => {
const [isVisible, setVisibility] = useState(true);
// eslint-disable-next-line no-null/no-null
const seekerRef = useRef<HTMLDivElement>(null);
const isSeeking = useRef<boolean>(false);
useEffect(() => {
if (isForceVisible) {
@ -86,13 +90,40 @@ const VideoPlayerControls: FC<IProps> = ({
const lang = useLang();
const handleSeek = useCallback((e: MouseEvent | TouchEvent) => {
if (isSeeking.current && seekerRef.current) {
const { width, left } = seekerRef.current.getBoundingClientRect();
const clientX = e instanceof MouseEvent ? e.clientX : e.targetTouches[0].clientX;
onSeek(Math.max(Math.min(duration * ((clientX - left) / width), duration), 0));
}
}, [duration, onSeek]);
const handleStartSeek = useCallback((e: MouseEvent | TouchEvent) => {
isSeeking.current = true;
handleSeek(e);
}, [handleSeek]);
const handleStopSeek = useCallback(() => {
isSeeking.current = false;
}, []);
useEffect(() => {
if (!seekerRef.current || !isVisible) return undefined;
return captureEvents(seekerRef.current, {
onCapture: handleStartSeek,
onRelease: handleStopSeek,
onClick: handleStopSeek,
onDrag: handleSeek,
});
}, [isVisible, handleStartSeek, handleSeek, handleStopSeek]);
if (!isVisible && !isForceVisible) {
return undefined;
}
return (
<div className={`VideoPlayerControls ${isForceMobileVersion ? 'mobile' : ''}`} onClick={stopEvent}>
{renderSeekLine(currentTime, duration, bufferedProgress, onSeek)}
{renderSeekLine(currentTime, duration, bufferedProgress, seekerRef)}
<Button
ariaLabel={lang('AccActionPlay')}
size="tiny"
@ -136,12 +167,14 @@ function renderFileSize(downloadedPercent: number, totalSize: number) {
);
}
function renderSeekLine(currentTime: number, duration: number, bufferedProgress: number, onSeek: OnChangeHandler) {
function renderSeekLine(
currentTime: number, duration: number, bufferedProgress: number, seekerRef: React.RefObject<HTMLDivElement>,
) {
const percentagePlayed = (currentTime / duration) * 100;
const percentageBuffered = bufferedProgress * 100;
return (
<div className="player-seekline">
<div className="player-seekline" ref={seekerRef}>
<div className="player-seekline-track">
<div
className="player-seekline-buffered"
@ -153,15 +186,6 @@ function renderSeekLine(currentTime: number, duration: number, bufferedProgress:
// @ts-ignore teact feature
style={`width: ${percentagePlayed || 0}%`}
/>
<input
min="0"
max="100"
step={0.01}
type="range"
onInput={onSeek}
className="player-seekline-input"
value={percentagePlayed || 0}
/>
</div>
</div>
);

View File

@ -65,7 +65,7 @@
justify-content: center;
flex-direction: column;
flex-shrink: 1;
padding: .25rem .5rem;
padding: 0 .5rem;
position: relative;
overflow: hidden;
cursor: pointer;
@ -85,22 +85,20 @@
font-weight: 500;
font-size: 0.875rem !important;
line-height: 1rem;
height: 1rem;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
body.is-ios & {
font-size: .9375rem !important;
line-height: .8125rem;
line-height: 1.125rem;
}
}
.subtitle {
font-size: 0.85rem !important;
color: var(--color-text-secondary);
line-height: 1.125rem;
height: 1.125rem;
line-height: 0.85rem;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
@ -108,6 +106,7 @@
body.is-ios & {
font-size: .9375rem !important;
line-height: 1.125rem;
}
}
}

View File

@ -1,6 +1,7 @@
import React, { FC, useCallback } from '../../lib/teact/teact';
import { withGlobal } from '../../lib/teact/teactn';
import { AudioOrigin } from '../../types';
import { GlobalActions } from '../../global/types';
import {
ApiAudio, ApiChat, ApiMessage, ApiUser,
@ -9,14 +10,17 @@ import {
import { IS_SINGLE_COLUMN_LAYOUT } from '../../util/environment';
import * as mediaLoader from '../../util/mediaLoader';
import {
getMediaDuration, getMessageAudio, getMessageKey, getMessageMediaHash, getSenderTitle,
getMediaDuration, getMessageContent, getMessageMediaHash, getSenderTitle,
} from '../../modules/helpers';
import { selectSender } from '../../modules/selectors';
import { selectChat, selectSender } from '../../modules/selectors';
import { pick } from '../../util/iteratees';
import renderText from '../common/helpers/renderText';
import useAudioPlayer from '../../hooks/useAudioPlayer';
import buildClassName from '../../util/buildClassName';
import useLang from '../../hooks/useLang';
import useMessageMediaMetadata from '../../hooks/useMessageMediaMetadata';
import { makeTrackId } from '../../util/audioPlayer';
import { clearMediaSession } from '../../util/mediaSession';
import RippleEffect from '../ui/RippleEffect';
import Button from '../ui/Button';
@ -25,25 +29,40 @@ import './AudioPlayer.scss';
type OwnProps = {
message: ApiMessage;
origin?: AudioOrigin;
className?: string;
noUi?: boolean;
};
type StateProps = {
sender?: ApiChat | ApiUser;
chat?: ApiChat;
};
type DispatchProps = Pick<GlobalActions, 'focusMessage' | 'closeAudioPlayer'>;
const AudioPlayer: FC<OwnProps & StateProps & DispatchProps> = ({
message, className, noUi, sender, focusMessage, closeAudioPlayer,
message, origin = AudioOrigin.Inline, className, noUi, sender, focusMessage, closeAudioPlayer, chat,
}) => {
const lang = useLang();
const { audio, voice } = getMessageContent(message);
const isVoice = Boolean(voice);
const senderName = sender ? getSenderTitle(lang, sender) : undefined;
const mediaData = mediaLoader.getFromMemory(getMessageMediaHash(message, 'inline')!) as (string | undefined);
const { playPause, isPlaying } = useAudioPlayer(
getMessageKey(message), getMediaDuration(message)!, mediaData, undefined, undefined, true,
const mediaMetadata = useMessageMediaMetadata(message, sender, chat);
const { playPause, stop, isPlaying } = useAudioPlayer(
makeTrackId(message),
getMediaDuration(message)!,
isVoice ? 'voice' : 'audio',
origin,
mediaData,
undefined,
mediaMetadata,
undefined,
true,
undefined,
undefined,
true,
);
const handleClick = useCallback(() => {
@ -55,14 +74,14 @@ const AudioPlayer: FC<OwnProps & StateProps & DispatchProps> = ({
playPause();
}
closeAudioPlayer();
}, [closeAudioPlayer, isPlaying, playPause]);
clearMediaSession();
stop();
}, [closeAudioPlayer, isPlaying, playPause, stop]);
if (noUi) {
return undefined;
}
const audio = getMessageAudio(message);
return (
<div className={buildClassName('AudioPlayer', className)} dir={lang.isRtl ? 'rtl' : undefined}>
<Button
@ -122,8 +141,12 @@ function renderVoice(subtitle: string, senderName?: string) {
export default withGlobal<OwnProps>(
(global, { message }): StateProps => {
const sender = selectSender(global, message);
const chat = selectChat(global, message.chatId);
return { sender };
return {
sender,
chat,
};
},
(setGlobal, actions): DispatchProps => pick(actions, ['focusMessage', 'closeAudioPlayer']),
)(AudioPlayer);

View File

@ -226,13 +226,15 @@
top: 1.375rem;
}
html.theme-dark &.own .Audio .ProgressSpinner {
background-image: url(data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTUiIGhlaWdodD0iMTUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+PHBhdGggZD0iTTguMjE4IDcuNWw1LjYzMy01LjYzM2EuNTA4LjUwOCAwIDEwLS43MTgtLjcxOEw3LjUgNi43ODIgMS44NjcgMS4xNDlhLjUwOC41MDggMCAxMC0uNzE4LjcxOEw2Ljc4MiA3LjVsLTUuNjMzIDUuNjMzYS41MDguNTA4IDAgMTAuNzE4LjcxOEw3LjUgOC4yMThsNS42MzMgNS42MzNhLjUwNi41MDYgMCAwMC43MTggMCAuNTA4LjUwOCAwIDAwMC0uNzE4TDguMjE4IDcuNXoiIGZpbGw9IiNGRkYiIGZpbGwtcnVsZT0ibm9uemVybyIgc3Ryb2tlPSIjODA3QkQ1IiBzdHJva2UtbGluZWpvaW49InJvdW5kIi8+PC9zdmc+);
background-position: center;
background-size: 1rem;
html.theme-dark &.own .Audio .toggle-play:not(.with-image) + .media-loading {
.ProgressSpinner {
background-image: url(data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTUiIGhlaWdodD0iMTUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+PHBhdGggZD0iTTguMjE4IDcuNWw1LjYzMy01LjYzM2EuNTA4LjUwOCAwIDEwLS43MTgtLjcxOEw3LjUgNi43ODIgMS44NjcgMS4xNDlhLjUwOC41MDggMCAxMC0uNzE4LjcxOEw2Ljc4MiA3LjVsLTUuNjMzIDUuNjMzYS41MDguNTA4IDAgMTAuNzE4LjcxOEw3LjUgOC4yMThsNS42MzMgNS42MzNhLjUwNi41MDYgMCAwMC43MTggMCAuNTA4LjUwOCAwIDAwMC0uNzE4TDguMjE4IDcuNXoiIGZpbGw9IiNGRkYiIGZpbGwtcnVsZT0ibm9uemVybyIgc3Ryb2tlPSIjODA3QkQ1IiBzdHJva2UtbGluZWpvaW49InJvdW5kIi8+PC9zdmc+);
background-position: center;
background-size: 1rem;
circle {
stroke: var(--background-color);
circle {
stroke: var(--background-color);
}
}
}

View File

@ -17,7 +17,9 @@ import {
ApiChat,
ApiSticker,
} from '../../../api/types';
import { FocusDirection, IAlbum, ISettings } from '../../../types';
import {
AudioOrigin, FocusDirection, IAlbum, ISettings,
} from '../../../types';
import { IS_ANDROID, IS_TOUCH_ENV } from '../../../util/environment';
import { pick } from '../../../util/iteratees';
@ -547,6 +549,7 @@ const Message: FC<OwnProps & StateProps & DispatchProps> = ({
<Audio
theme={theme}
message={message}
origin={AudioOrigin.Inline}
uploadProgress={uploadProgress}
lastSyncTime={lastSyncTime}
isSelectable={isInDocumentGroup}

View File

@ -20,11 +20,11 @@
}
.message-time {
margin-right: .1875rem;
margin-inline-end: .1875rem;
}
.message-views {
margin-left: .1875rem;
margin-inline-start: .1875rem;
}
.message-signature {
@ -34,8 +34,8 @@
}
.icon-channelviews {
margin-left: 0.125rem;
margin-right: 0.375rem;
margin-inline-start: 0.125rem;
margin-inline-end: 0.375rem;
font-size: 1.125rem;
position: relative;
top: -0.0625rem;

View File

@ -326,7 +326,7 @@
}
&.audio {
min-width: 17.1875rem;
min-width: 20rem;
padding: .5rem .5rem .8125rem !important;
.Audio + .text-content {

View File

@ -2,7 +2,7 @@ import React, { useCallback } from '../../../../lib/teact/teact';
import { getDispatch } from '../../../../lib/teact/teactn';
import { isChatPrivate } from '../../../../modules/helpers';
import { IAlbum, MediaViewerOrigin } from '../../../../types';
import { AudioOrigin, IAlbum, MediaViewerOrigin } from '../../../../types';
import {
ApiChat, ApiMessage, ApiUser, MAIN_THREAD_ID,
} from '../../../../api/types';
@ -77,7 +77,7 @@ export default function useInnerHandlers(
}, [chatId, threadId, messageId, openMediaViewer, isScheduled]);
const handleAudioPlay = useCallback((): void => {
openAudioPlayer({ chatId, messageId });
openAudioPlayer({ chatId, messageId, origin: AudioOrigin.Inline });
}, [chatId, messageId, openAudioPlayer]);
const handleAlbumMediaClick = useCallback((albumMessageId: number): void => {

View File

@ -11,7 +11,7 @@ import {
} from '../../api/types';
import { GlobalActions } from '../../global/types';
import {
NewChatMembersProgress, ISettings, MediaViewerOrigin, ProfileState, ProfileTabType, SharedMediaType,
NewChatMembersProgress, ISettings, MediaViewerOrigin, ProfileState, ProfileTabType, SharedMediaType, AudioOrigin,
} from '../../types';
import {
@ -190,7 +190,7 @@ const Profile: FC<OwnProps & StateProps & DispatchProps> = ({
}, [profileId, openMediaViewer]);
const handlePlayAudio = useCallback((messageId: number) => {
openAudioPlayer({ chatId: profileId, messageId });
openAudioPlayer({ chatId: profileId, messageId, origin: AudioOrigin.SharedMedia });
}, [profileId, openAudioPlayer]);
const handleMemberClick = useCallback((id: number) => {
@ -325,7 +325,7 @@ const Profile: FC<OwnProps & StateProps & DispatchProps> = ({
key={id}
theme={theme}
message={chatMessages[id]}
target="sharedMedia"
origin={AudioOrigin.SharedMedia}
date={chatMessages[id].date}
lastSyncTime={lastSyncTime}
className="scroll-item"

View File

@ -30,7 +30,8 @@
height: 3.5rem;
border: 0;
border-radius: var(--border-radius-default);
background: transparent;
background-color: transparent;
background-size: cover;
padding: 0.625rem;
color: white;
line-height: 1.2;
@ -63,36 +64,36 @@
}
&.primary {
background: var(--color-primary);
background-color: var(--color-primary);
color: var(--color-white);
--ripple-color: rgba(0, 0, 0, .08);
@include active-styles() {
background: var(--color-primary-shade);
background-color: var(--color-primary-shade);
}
@include no-ripple-styles() {
background: var(--color-primary-shade-darker);
background-color: var(--color-primary-shade-darker);
}
}
&.secondary {
background: var(--color-background);
background-color: var(--color-background);
color: rgba(var(--color-text-secondary-rgb), 0.75);
--ripple-color: rgba(0, 0, 0, .08);
@include active-styles() {
background: var(--color-primary);
background-color: var(--color-primary);
color: white;
}
@include no-ripple-styles() {
background: var(--color-primary-shade);
background-color: var(--color-primary-shade);
}
}
&.gray {
background: var(--color-background);
background-color: var(--color-background);
color: var(--color-text-secondary);
--ripple-color: rgba(0, 0, 0, .08);
@ -101,54 +102,54 @@
}
@include no-ripple-styles() {
background: var(--color-chat-hover);
background-color: var(--color-chat-hover);
}
}
&.danger {
background: var(--color-background);
background-color: var(--color-background);
color: var(--color-error);
--ripple-color: rgba(var(--color-error-rgb), .16);
@include active-styles() {
background: var(--color-error);
background-color: var(--color-error);
color: var(--color-white);
}
@include no-ripple-styles() {
background: var(--color-error-shade);
background-color: var(--color-error-shade);
}
}
&.text {
background: none;
background-color: transparent;
&.primary {
color: var(--color-primary);
background: none;
background-color: transparent;
@include active-styles() {
background: rgba(var(--color-primary-shade-rgb), .08);
background-color: rgba(var(--color-primary-shade-rgb), .08);
}
@include no-ripple-styles() {
background: rgba(var(--color-primary-shade-rgb), .16);
background-color: rgba(var(--color-primary-shade-rgb), .16);
}
}
&.secondary {
background: none;
background-color: transparent;
color: var(--color-text-secondary);
}
&.danger {
@include active-styles() {
background: rgba(var(--color-error-rgb), .08);
background-color: rgba(var(--color-error-rgb), .08);
color: var(--color-error);
}
@include no-ripple-styles() {
background: rgba(var(--color-error-rgb), .16);
background-color: rgba(var(--color-error-rgb), .16);
}
}
}
@ -316,4 +317,13 @@
.emoji {
vertical-align: -3px;
}
.backdrop {
position: absolute;
top: 0;
right: 0;
bottom: 0;
left: 0;
background-color: rgba(0, 0, 0, 0.5);
}
}

View File

@ -17,6 +17,7 @@ export type OwnProps = {
children: any;
size?: 'default' | 'smaller' | 'tiny';
color?: 'primary' | 'secondary' | 'gray' | 'danger' | 'translucent' | 'translucent-white' | 'dark';
backgroundImage?: string;
className?: string;
round?: boolean;
pill?: boolean;
@ -54,6 +55,7 @@ const Button: FC<OwnProps> = ({
children,
size = 'default',
color = 'primary',
backgroundImage,
className,
round,
pill,
@ -91,6 +93,7 @@ const Button: FC<OwnProps> = ({
ripple && 'has-ripple',
faded && 'faded',
isClicked && 'clicked',
backgroundImage && 'with-image',
);
const handleClick = useCallback((e: ReactMouseEvent<HTMLButtonElement, MouseEvent>) => {
@ -146,13 +149,20 @@ const Button: FC<OwnProps> = ({
title={ariaLabel}
tabIndex={tabIndex}
dir={isRtl ? 'rtl' : undefined}
// @ts-ignore
style={backgroundImage ? `background-image: url(${backgroundImage})` : undefined}
>
{isLoading ? (
<div>
<span dir={isRtl ? 'auto' : undefined}>Please wait..</span>
<span dir={isRtl ? 'auto' : undefined}>Please wait...</span>
<Spinner color={isText ? 'blue' : 'white'} />
</div>
) : children}
) : (
<>
{backgroundImage && <div className="backdrop" />}
{children}
</>
)}
{!disabled && ripple && (
<RippleEffect />
)}

View File

@ -96,6 +96,10 @@
&::-webkit-slider-thumb {
-webkit-appearance: none;
}
&::-moz-slider-thumb {
-moz-appearance: none;
}
}
// Apply custom styles

View File

@ -44,6 +44,7 @@ import {
EmojiKeywords,
InlineBotSettings,
NewChatMembersProgress,
AudioOrigin,
} from '../types';
export type MessageListType = 'thread' | 'pinned' | 'scheduled';
@ -320,6 +321,8 @@ export type GlobalState = {
audioPlayer: {
chatId?: number;
messageId?: number;
threadId?: number;
origin?: AudioOrigin;
};
topPeers: {

25
src/hooks/useAsync.ts Normal file
View File

@ -0,0 +1,25 @@
import { useEffect, useState } from '../lib/teact/teact';
/**
 * Runs an async factory whenever `deps` change and exposes its state.
 *
 * @param fn           Factory producing the promise to await. Intentionally
 *                     excluded from the effect deps (it is recreated every
 *                     render); only `deps` trigger a re-run.
 * @param deps         Dependency list controlling when `fn` is re-invoked.
 * @param defaultValue Initial `result` before the first resolution.
 * @returns `{ isLoading, error, result }`. `result` keeps its previous value
 *          while a new request is in flight.
 */
export const useAsync = <T>(fn: () => Promise<T>, deps: any[], defaultValue?: T) => {
  const [isLoading, setIsLoading] = useState<boolean>(false);
  const [error, setError] = useState<Error | undefined>();
  const [result, setResult] = useState<T | undefined>(defaultValue);
  useEffect(() => {
    setIsLoading(true);
    // Guards against state updates after unmount or after deps changed
    let wasCancelled = false;
    fn().then((res) => {
      if (wasCancelled) return;
      setIsLoading(false);
      // Clear a stale error from a previous, failed run so consumers never
      // see `error` and a fresh `result` at the same time
      setError(undefined);
      setResult(res);
    }, (err) => {
      if (wasCancelled) return;
      setIsLoading(false);
      setError(err);
    });
    return () => {
      wasCancelled = true;
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, deps);
  return { isLoading, error, result };
};

View File

@ -1,23 +1,36 @@
import {
useCallback, useEffect, useRef, useState,
} from '../lib/teact/teact';
import { getDispatch } from '../lib/teact/teactn';
import { register } from '../util/audioPlayer';
import { AudioOrigin } from '../types';
import { register, Track } from '../util/audioPlayer';
import useEffectWithPrevDeps from './useEffectWithPrevDeps';
import { isSafariPatchInProgress } from '../util/patchSafariProgressiveAudio';
import useOnChange from './useOnChange';
import {
MediaSessionHandlers, registerMediaSession, setPlaybackState, setPositionState, updateMetadata,
} from '../util/mediaSession';
type Handler = (e: Event) => void;
const DEFAULT_SKIP_TIME = 10;
export default (
trackId: string,
originalDuration: number, // Sometimes incorrect for voice messages
trackType: Track['type'],
origin: AudioOrigin,
src?: string,
handlers?: Record<string, Handler>,
metadata?: MediaMetadata,
onInit?: (element: HTMLAudioElement) => void,
shouldPlay = false,
onForcePlay?: NoneToVoidFunction,
onTrackChange?: NoneToVoidFunction,
noPlaylist = false,
noProgressUpdates = false,
) => {
// eslint-disable-next-line no-null/no-null
const controllerRef = useRef<ReturnType<typeof register>>(null);
@ -27,25 +40,43 @@ export default (
const [playProgress, setPlayProgress] = useState<number>(0);
const handleTrackChange = useCallback(() => {
setIsPlaying(false);
if (onTrackChange) onTrackChange();
}, [onTrackChange]);
useOnChange(() => {
controllerRef.current = register(trackId, (eventName, e) => {
controllerRef.current = register(trackId, trackType, origin, (eventName, e) => {
switch (eventName) {
case 'onPlay':
setIsPlaying(true);
registerMediaSession(metadata, makeMediaHandlers(controllerRef));
setPlaybackState('playing');
break;
case 'onPause':
setIsPlaying(false);
setPlaybackState('paused');
break;
case 'onTimeUpdate': {
const { proxy } = controllerRef.current!;
const duration = proxy.duration && Number.isFinite(proxy.duration) ? proxy.duration : originalDuration;
setPlayProgress(proxy.currentTime / duration);
if (!noProgressUpdates) setPlayProgress(proxy.currentTime / duration);
setPositionState({
duration: proxy.duration,
playbackRate: proxy.playbackRate,
position: proxy.currentTime,
});
break;
}
case 'onEnded': {
setPlaybackState('paused');
break;
}
}
handlers?.[eventName]?.(e);
}, onForcePlay);
}, onForcePlay, handleTrackChange);
const { proxy } = controllerRef.current!;
@ -59,17 +90,21 @@ export default (
}
}, [trackId]);
useEffect(() => {
if (isPlaying && metadata) updateMetadata(metadata);
}, [metadata, isPlaying]);
const {
play, pause, setCurrentTime, proxy, destroy,
play, pause, setCurrentTime, proxy, destroy, setVolume, setCurrentOrigin, stop,
} = controllerRef.current!;
const duration = proxy.duration && Number.isFinite(proxy.duration) ? proxy.duration : originalDuration;
// RAF progress
useEffect(() => {
if (duration && !isSafariPatchInProgress(proxy)) {
if (duration && !isSafariPatchInProgress(proxy) && !noProgressUpdates) {
setPlayProgress(proxy.currentTime / duration);
}
}, [duration, playProgress, proxy]);
}, [duration, playProgress, proxy, noProgressUpdates]);
// Cleanup
useEffect(() => () => {
@ -92,20 +127,85 @@ export default (
}
}, [shouldPlay, src, isPlaying, play, proxy.src, proxy.paused]);
const playIfPresent = useCallback(() => {
if (src) {
setCurrentOrigin(origin);
play(src);
}
}, [src, origin, play, setCurrentOrigin]);
const playPause = useCallback(() => {
if (isPlaying) {
pause();
} else if (src) {
play(src);
} else {
playIfPresent();
}
}, [src, pause, play, isPlaying]);
}, [pause, playIfPresent, isPlaying]);
const setTime = useCallback((time: number) => {
setCurrentTime(time);
if (duration) {
setPlayProgress(proxy.currentTime / duration);
}
}, [duration, proxy, setCurrentTime]);
return {
isPlaying: isPlayingSync,
playProgress,
playPause,
setCurrentTime,
pause,
play: playIfPresent,
stop,
setCurrentTime: setTime,
setVolume,
audioProxy: proxy,
duration,
};
};
/**
 * Builds the Media Session action handlers for the currently registered
 * audio controller. `nexttrack`/`previoustrack` are attached only when a
 * neighboring track exists, so the browser hides the unusable buttons.
 */
function makeMediaHandlers(controllerRef: React.RefObject<ReturnType<typeof register>>) {
  let mediaHandlers: MediaSessionHandlers = {};
  if (controllerRef && controllerRef.current) {
    const {
      play, pause, setCurrentTime, proxy, requestNextTrack, requestPreviousTrack, isFirst, isLast,
    } = controllerRef.current;
    mediaHandlers = {
      play: () => {
        play(proxy.src);
      },
      pause: () => {
        pause();
      },
      stop: () => {
        pause();
        setCurrentTime(0);
        getDispatch().closeAudioPlayer();
      },
      seekbackward: (event) => {
        const skipTime = event.seekOffset || DEFAULT_SKIP_TIME;
        setCurrentTime(Math.max(proxy.currentTime - skipTime, 0));
      },
      seekforward: (event) => {
        const skipTime = event.seekOffset || DEFAULT_SKIP_TIME;
        setCurrentTime(Math.min(proxy.currentTime + skipTime, proxy.duration));
      },
      seekTo: (event) => {
        // `seekTime` may legitimately be 0 (seek to the very start), so check
        // for presence explicitly instead of relying on truthiness
        if (event.seekTime !== undefined) {
          setCurrentTime(event.seekTime);
        }
      },
    };
    if (!isLast()) {
      mediaHandlers.nexttrack = () => {
        requestNextTrack();
      };
    }
    if (!isFirst()) {
      mediaHandlers.previoustrack = () => {
        requestPreviousTrack();
      };
    }
  }
  return mediaHandlers;
}

View File

@ -0,0 +1,71 @@
import { useMemo } from '../lib/teact/teact';
import {
ApiAudio, ApiChat, ApiMessage, ApiUser, ApiVoice,
} from '../api/types';
import useMedia from './useMedia';
import { useAsync } from './useAsync';
import {
getAudioHasCover, getChatAvatarHash, getChatTitle, getMessageContent, getMessageMediaHash, getSenderTitle,
} from '../modules/helpers';
import { getTranslation } from '../util/langProvider';
import { buildMediaMetadata } from '../util/mediaSession';
import { scaleImage, resizeImage } from '../util/imageResize';
import { AVATAR_FULL_DIMENSIONS } from '../components/common/helpers/mediaDimensions';
// @ts-ignore
import telegramLogoPath from '../assets/telegram-logo-filled.svg';
const LOGO_DIMENSIONS = { width: 200, height: 200 };
const MINIMAL_SIZE = 115; // spec says 100, but on Chrome 93 it's not showing
// TODO Add support for video in future
// Builds Media Session metadata (title/artist/album/artwork) for an audio or
// voice message. Artwork falls back to the Telegram logo while the real cover
// (audio thumbnail or sender avatar) is being loaded and resized.
export default (message: ApiMessage, sender?: ApiUser | ApiChat, chat?: ApiChat): MediaMetadata | undefined => {
  const { audio, voice } = getMessageContent(message);
  // NOTE(review): 'Voice message' is not run through the translation helper
  // like the other strings here — presumably intentional; confirm
  const title = audio ? (audio.title || audio.fileName) : voice ? 'Voice message' : '';
  const artist = (audio && audio.performer) || (sender && getSenderTitle(getTranslation, sender));
  const album = (chat && getChatTitle(getTranslation, chat)) || 'Telegram';
  // Audio messages use the embedded cover; voice messages use the sender avatar
  const audioCoverHash = (audio && getAudioHasCover(audio) && getMessageMediaHash(message, 'pictogram'));
  const avatarHash = sender && getChatAvatarHash(sender, 'big');
  const hash = (audio && audioCoverHash) || (voice && avatarHash);
  const media = useMedia(hash);
  const size = getCoverSize(audio, voice, media);
  // Resize/upscale the cover to the minimum Media Session artwork size;
  // the logo is used until the async resize resolves
  const { result: url } = useAsync(() => makeGoodArtwork(media, size), [media, size], telegramLogoPath);
  return useMemo(() => {
    return buildMediaMetadata({
      title,
      artist,
      album,
      artwork: [{ src: url }],
    });
  }, [album, artist, title, url]);
};
/**
 * Normalizes an artwork URL for the Media Session: falls back to the app
 * logo when there is no cover, forces a minimum-size resize when the cover
 * dimensions are unknown, upscales covers below the minimum, and passes
 * everything else through unchanged.
 */
function makeGoodArtwork(url?: string, size?: { width: number; height: number }): Promise<string> {
  if (!url) {
    return Promise.resolve(telegramLogoPath);
  }
  if (!size) {
    return resizeImage(url, MINIMAL_SIZE, MINIMAL_SIZE);
  }
  const smallestSide = Math.min(size.width, size.height);
  return smallestSide < MINIMAL_SIZE
    ? scaleImage(url, MINIMAL_SIZE / smallestSide)
    : Promise.resolve(url);
}
/**
 * Resolves the natural dimensions of the cover image that will be shown:
 * logo dimensions when there is no media URL, the audio thumbnail size
 * (preferring type `m`), or the full avatar size for voice messages.
 */
function getCoverSize(audio?: ApiAudio, voice?: ApiVoice, url?: string) {
  if (!url) {
    return LOGO_DIMENSIONS;
  }
  if (audio) {
    const sizes = audio.thumbnailSizes;
    if (!sizes || !sizes.length) {
      return undefined;
    }
    // Sometimes the `m` thumbnail is not present — fall back to the first one
    return sizes.find((thumbSize) => thumbSize.type === 'm') || sizes[0];
  }
  return voice ? AVATAR_FULL_DIMENSIONS : undefined;
}

View File

@ -159,7 +159,7 @@ addReducer('closeMediaViewer', (global) => {
addReducer('openAudioPlayer', (global, actions, payload) => {
const {
chatId, threadId, messageId,
chatId, threadId, messageId, origin,
} = payload!;
return {
@ -168,6 +168,7 @@ addReducer('openAudioPlayer', (global, actions, payload) => {
chatId,
threadId,
messageId,
origin,
},
};
});

View File

@ -203,7 +203,7 @@ export function getMessageMediaHash(
switch (target) {
case 'micro':
case 'pictogram':
return undefined;
return getAudioHasCover(audio) ? `${base}?size=m` : undefined;
case 'download':
return `${base}?download`;
default:
@ -232,6 +232,10 @@ function getVideoOrAudioBaseHash(media: ApiAudio | ApiVideo, base: string) {
return base;
}
export function getAudioHasCover(media: ApiAudio) {
return media.thumbnailSizes && media.thumbnailSizes.length > 0;
}
export function getMessageMediaFormat(
message: ApiMessage, target: Target,
): Exclude<ApiMediaFormat, ApiMediaFormat.Lottie>;

View File

@ -12,13 +12,15 @@ const CONTENT_NOT_SUPPORTED = 'The message is not supported on this version of T
const RE_LINK = new RegExp(RE_LINK_TEMPLATE, 'i');
const TRUNCATED_SUMMARY_LENGTH = 80;
export function getMessageKey(message: ApiMessage) {
export type MessageKey = string; // `msg${number}-${number}`;
export function getMessageKey(message: ApiMessage): MessageKey {
const { chatId, id } = message;
return `msg${chatId}-${id}`;
}
export function parseMessageKey(key: string) {
export function parseMessageKey(key: MessageKey) {
const match = key.match(/^msg(-?\d+)-(\d+)/)!;
return { chatId: Number(match[1]), messageId: Number(match[2]) };

View File

@ -237,6 +237,12 @@ export enum MediaViewerOrigin {
SearchResult,
}
export enum AudioOrigin {
Inline,
SharedMedia,
Search,
}
export enum ChatCreationProgress {
Idle,
InProgress,

View File

@ -1,24 +1,40 @@
import { getDispatch } from '../lib/teact/teactn';
import { AudioOrigin } from '../types';
import { ApiMessage } from '../api/types';
import { IS_SAFARI } from './environment';
import safePlay from './safePlay';
import { patchSafariProgressiveAudio, isSafariPatchInProgress } from './patchSafariProgressiveAudio';
import { getDispatch } from '../lib/teact/teactn';
import { parseMessageKey } from '../modules/helpers';
import { getMessageKey, parseMessageKey } from '../modules/helpers';
import { fastRaf } from './schedulers';
type Handler = (eventName: string, e: Event) => void;
type TrackId = string; // `${MessageKey}-${number}`;
interface Track {
export interface Track {
audio: HTMLAudioElement;
proxy: HTMLAudioElement;
type: 'voice' | 'audio';
origin: AudioOrigin;
handlers: Handler[];
onForcePlay?: NoneToVoidFunction;
onTrackChange?: NoneToVoidFunction;
}
const tracks = new Map<string, Track>();
let queue: string[] = [];
let voiceQueue: TrackId[] = [];
let musicQueue: TrackId[] = [];
let currentTrackId: string | undefined;
function createAudio(trackId: string, onForcePlay?: NoneToVoidFunction) {
function createAudio(
trackId: TrackId,
type: Track['type'],
origin: AudioOrigin,
onForcePlay?: NoneToVoidFunction,
onTrackChange?: NoneToVoidFunction,
): Track {
const audio = new Audio();
function handleEvent(eventName: string) {
@ -44,45 +60,69 @@ function createAudio(trackId: string, onForcePlay?: NoneToVoidFunction) {
audio.addEventListener('loadeddata', handleEvent('onLoadedData'));
audio.addEventListener('playing', handleEvent('onPlaying'));
audio.addEventListener('ended', () => {
if (!tracks.has(trackId)) {
return;
}
if (isSafariPatchInProgress(audio)) {
return;
}
const nextTrackId = queue[queue.indexOf(trackId) + 1];
if (!nextTrackId) {
return;
}
if (!tracks.has(nextTrackId)) {
// A bit hacky way to continue playlist when switching chat
getDispatch().openAudioPlayer(parseMessageKey(nextTrackId));
return;
}
const nextTrack = tracks.get(nextTrackId)!;
if (nextTrack.onForcePlay) {
nextTrack.onForcePlay();
}
currentTrackId = nextTrackId;
if (nextTrack.audio.src) {
safePlay(nextTrack.audio);
}
playNext(trackId);
});
return {
audio,
type,
proxy: new Proxy(audio, {
get: (origin, key: keyof HTMLAudioElement) => origin[key],
get: (target, key: keyof HTMLAudioElement) => target[key],
}),
origin,
handlers: [],
onForcePlay,
onTrackChange,
};
}
// Stops the current track and starts the adjacent one (next, or previous
// when `isReverseOrder` is set) from the same queue as `trackId`.
function playNext(trackId: TrackId, isReverseOrder?: boolean) {
  const currentTrack = currentTrackId && tracks.get(currentTrackId);
  if (currentTrack) {
    // Rewind the outgoing track and let its UI know it is no longer active
    currentTrack.audio.pause();
    currentTrack.audio.currentTime = 0;
    if (currentTrack.onTrackChange) currentTrack.onTrackChange();
  }
  const track = tracks.get(trackId)!;
  const queue = getTrackQueue(track);
  if (!queue) return;
  const nextTrackId = findNextInQueue(queue, trackId, track.origin, isReverseOrder);
  if (!nextTrackId) {
    return;
  }
  if (!tracks.has(nextTrackId)) {
    // A bit hacky way to continue playlist when switching chat
    getDispatch().openAudioPlayer(parseMessageKey(splitTrackId(nextTrackId).messageKey));
    return;
  }
  const nextTrack = tracks.get(nextTrackId)!;
  if (currentTrack) nextTrack.origin = currentTrack.origin; // Preserve origin
  if (nextTrack.onForcePlay) {
    // Gives the owning component a chance to load the media before playback
    nextTrack.onForcePlay();
  }
  currentTrackId = nextTrackId;
  if (nextTrack.audio.src) {
    safePlay(nextTrack.audio);
  }
}
export function stopCurrentAudio() {
const currentTrack = currentTrackId && tracks.get(currentTrackId);
if (currentTrack) {
@ -90,23 +130,34 @@ export function stopCurrentAudio() {
}
}
export function register(trackId: string, handler: Handler, onForcePlay?: NoneToVoidFunction) {
export function register(
trackId: string,
trackType: Track['type'],
origin: AudioOrigin,
handler: Handler,
onForcePlay?: NoneToVoidFunction,
onTrackChange?: NoneToVoidFunction,
) {
if (!tracks.has(trackId)) {
tracks.set(trackId, createAudio(trackId, onForcePlay));
if (!queue.includes(trackId)) {
queue.push(trackId);
}
const track = createAudio(trackId, trackType, origin, onForcePlay, onTrackChange);
tracks.set(trackId, track);
addTrackToQueue(track, trackId);
}
const { audio, proxy, handlers } = tracks.get(trackId)!;
handlers.push(handler);
return {
play(src: string) {
if (currentTrackId && currentTrackId !== trackId) {
tracks.get(currentTrackId)!.audio.pause();
if (!audio.paused) return;
const currentTrack = currentTrackId && tracks.get(currentTrackId);
if (currentTrack && currentTrackId !== trackId) {
currentTrack.audio.pause();
currentTrack.audio.currentTime = 0;
if (isSafariPatchInProgress(currentTrack.audio)) {
currentTrack.audio.dataset.preventPlayAfterPatch = 'true';
}
if (currentTrack.onTrackChange) currentTrack.onTrackChange();
}
currentTrackId = trackId;
@ -116,6 +167,7 @@ export function register(trackId: string, handler: Handler, onForcePlay?: NoneTo
audio.preload = 'auto';
if (src.includes('/progressive/') && IS_SAFARI) {
delete audio.dataset.preventPlayAfterPatch;
patchSafariProgressiveAudio(audio);
}
}
@ -123,20 +175,69 @@ export function register(trackId: string, handler: Handler, onForcePlay?: NoneTo
safePlay(audio);
},
setCurrentOrigin(audioOrigin: AudioOrigin) {
tracks.get(trackId)!.origin = audioOrigin;
},
pause() {
if (currentTrackId === trackId) {
audio.pause();
}
},
stop() {
if (currentTrackId === trackId) {
// Hack, reset src to remove default media session notification
const prevSrc = audio.src;
audio.pause();
// onPause not called otherwise, but required to sync UI
fastRaf(() => {
audio.src = '';
audio.src = prevSrc;
});
}
},
setCurrentTime(time: number) {
if (currentTrackId === trackId) {
audio.currentTime = time;
if (audio.fastSeek) {
audio.fastSeek(time);
} else {
audio.currentTime = time;
}
}
},
setVolume(volume: number) {
if (currentTrackId === trackId) {
audio.volume = volume;
}
},
proxy,
requestNextTrack() {
playNext(trackId);
},
isLast() {
const track = tracks.get(trackId)!;
const queue = getTrackQueue(track);
if (!queue) return true;
return !findNextInQueue(queue, trackId, tracks.get(trackId)!.origin);
},
isFirst() {
const track = tracks.get(trackId)!;
const queue = getTrackQueue(track);
if (!queue) return true;
return !findNextInQueue(queue, trackId, tracks.get(trackId)!.origin, true);
},
requestPreviousTrack() {
playNext(trackId, true);
},
destroy(shouldRemoveFromQueue = false) {
const track = tracks.get(trackId);
if (!track) {
@ -150,7 +251,7 @@ export function register(trackId: string, handler: Handler, onForcePlay?: NoneTo
tracks.delete(trackId);
if (shouldRemoveFromQueue) {
queue = queue.filter((id) => id !== trackId);
removeFromQueue(track, trackId);
}
if (trackId === currentTrackId) {
@ -160,3 +261,75 @@ export function register(trackId: string, handler: Handler, onForcePlay?: NoneTo
},
};
}
/**
 * Maps a track to the playlist it belongs to — music and voice messages
 * are queued independently of each other.
 */
function getTrackQueue(track: Track) {
  switch (track.type) {
    case 'audio':
      return musicQueue;
    case 'voice':
      return voiceQueue;
    default:
      return undefined;
  }
}
function addTrackToQueue(track: Track, trackId: TrackId) {
if (track.type === 'audio' && !musicQueue.includes(trackId)) {
musicQueue.push(trackId);
musicQueue.sort(trackIdComparator);
}
if (track.type === 'voice' && !voiceQueue.includes(trackId)) {
voiceQueue.push(trackId);
voiceQueue.sort(trackIdComparator);
}
}
/**
 * Removes a track id from the queue matching the track's type.
 * Reassigns the module-level queue, so it cannot go through
 * `getTrackQueue` (which returns the array by reference).
 */
function removeFromQueue(track: Track, trackId: TrackId) {
  const withoutTrack = (ids: TrackId[]) => ids.filter((id) => id !== trackId);
  if (track.type === 'audio') {
    musicQueue = withoutTrack(musicQueue);
  } else if (track.type === 'voice') {
    voiceQueue = withoutTrack(voiceQueue);
  }
}
/**
 * Finds the id of the track adjacent to `current` within `queue`.
 *
 * For `Search` origin the whole queue is traversed; otherwise only tracks
 * from the same chat are considered. Inline playback walks the queue in
 * reverse (older messages come later), which `isReverseOrder` can flip.
 *
 * @returns The adjacent track id, or `undefined` when at the queue edge
 *          or when `current` is not in the queue.
 */
function findNextInQueue(queue: TrackId[], current: TrackId, origin: AudioOrigin, isReverseOrder?: boolean) {
  if (origin === AudioOrigin.Search) {
    const index = queue.indexOf(current);
    if (index < 0) return undefined;
    const direction = isReverseOrder ? -1 : 1;
    return queue[index + direction];
  }
  const { chatId } = parseMessageKey(splitTrackId(current).messageKey);
  // Include the trailing `-` so that e.g. chat 12 does not also match
  // tracks from chats 120, 123, … whose keys share the digit prefix
  const chatAudio = queue.filter((id) => id.startsWith(`msg${chatId}-`));
  const index = chatAudio.indexOf(current);
  if (index < 0) return undefined;
  let direction = origin === AudioOrigin.Inline ? -1 : 1;
  if (isReverseOrder) direction *= -1;
  return chatAudio[index + direction];
}
export function makeTrackId(message: ApiMessage): TrackId {
return `${getMessageKey(message)}-${message.date}`;
}
/**
 * Splits a track id back into its message key (`msg{chatId}-{msgId}`)
 * and the message date appended by `makeTrackId`.
 */
function splitTrackId(trackId: TrackId) {
  const [messageKey] = trackId.match(/^msg(-?\d+)-(\d+)/)!;
  const date = Number(trackId.slice(trackId.lastIndexOf('-') + 1));
  return { messageKey, date };
}
// Sorts track ids by date, newest first; ties are broken by comparing
// message keys (also descending). Missing operands compare as equal.
function trackIdComparator(one?: TrackId, two?: TrackId) {
  if (!one || !two) return 0;
  const first = splitTrackId(one);
  const second = splitTrackId(two);
  const byDate = second.date - first.date;
  return byDate === 0 ? second.messageKey.localeCompare(first.messageKey) : byDate;
}

View File

@ -132,15 +132,19 @@ export function formatMediaDateTime(lang: LangFn, datetime: number | Date) {
return `${formatHumanDate(lang, date, true)}, ${formatTime(date)}`;
}
export function formatMediaDuration(duration: number) {
export function formatMediaDuration(duration: number, maxValue?: number) {
const hours = Math.floor(duration / 3600);
const minutes = Math.floor((duration % 3600) / 60);
const seconds = Math.floor(duration % 3600 % 60);
const maxHours = maxValue ? Math.floor(maxValue / 3600) : 0;
const maxMinutes = maxValue ? Math.floor((maxValue % 3600) / 60) : 0;
let string = '';
if (hours > 0) {
if (hours > 0 || maxHours > 0) {
string += `${String(hours).padStart(2, '0')}:`;
string += `${String(minutes).padStart(2, '0')}:`;
} else if (maxMinutes >= 10) {
string += `${String(minutes).padStart(2, '0')}:`;
} else {
string += `${String(minutes)}:`;
}

102
src/util/imageResize.ts Normal file
View File

@ -0,0 +1,102 @@
/**
 * Scales an image by a ratio and returns an object URL to the result.
 *
 * @param image      Source image: a URL string or a Blob (a temporary
 *                   object URL is created and revoked for Blobs).
 * @param ratio      Multiplier applied to both dimensions.
 * @param outputType MIME type of the produced image.
 * @returns Object URL of the scaled image. Rejects if the image fails to
 *          load or scaling fails (the original silently hung forever on
 *          a load error because `onerror` was never handled).
 */
export function scaleImage(image: string | Blob, ratio: number, outputType: string = 'image/png'): Promise<string> {
  const url = image instanceof Blob ? URL.createObjectURL(image) : image;
  const img = new Image();
  return new Promise((resolve, reject) => {
    img.onload = () => {
      scale(img, img.width * ratio, img.height * ratio, outputType)
        .then((blob) => URL.createObjectURL(blob))
        .then(resolve, reject)
        .finally(() => {
          if (image instanceof Blob) {
            URL.revokeObjectURL(url); // Revoke blob url that we created
          }
        });
    };
    img.onerror = () => {
      if (image instanceof Blob) {
        URL.revokeObjectURL(url);
      }
      reject(new Error('Failed to load image'));
    };
    img.src = url;
  });
}
/**
 * Resizes an image to exact dimensions and returns an object URL to the
 * result.
 *
 * @param image      Source image: a URL string or a Blob (a temporary
 *                   object URL is created and revoked for Blobs).
 * @param width      Target width in pixels.
 * @param height     Target height in pixels.
 * @param outputType MIME type of the produced image.
 * @returns Object URL of the resized image. Rejects if the image fails to
 *          load or resizing fails (the original silently hung forever on
 *          a load error because `onerror` was never handled).
 */
export function resizeImage(
  image: string | Blob, width: number, height: number, outputType: string = 'image/png',
): Promise<string> {
  const url = image instanceof Blob ? URL.createObjectURL(image) : image;
  const img = new Image();
  return new Promise((resolve, reject) => {
    img.onload = () => {
      scale(img, width, height, outputType)
        .then((blob) => URL.createObjectURL(blob))
        .then(resolve, reject)
        .finally(() => {
          if (image instanceof Blob) {
            URL.revokeObjectURL(url); // Revoke blob url that we created
          }
        });
    };
    img.onerror = () => {
      if (image instanceof Blob) {
        URL.revokeObjectURL(url);
      }
      reject(new Error('Failed to load image'));
    };
    img.src = url;
  });
}
// Scales an already-loaded image element to the given dimensions and
// resolves with the result as a Blob. Uses `createImageBitmap` with
// high-quality resizing when available, otherwise falls back to manual
// multi-step canvas downscaling.
async function scale(
  img: HTMLImageElement, width: number, height: number, outputType: string = 'image/png',
) {
  // Safari does not have built-in resize method with quality control
  if ('createImageBitmap' in window) {
    const bitmap = await window.createImageBitmap(img,
      { resizeWidth: width, resizeHeight: height, resizeQuality: 'high' });
    return new Promise((res) => {
      const canvas = document.createElement('canvas');
      canvas.width = bitmap.width;
      canvas.height = bitmap.height;
      // `bitmaprenderer` transfers the bitmap without an extra copy;
      // fall back to a plain 2d draw where it is unavailable
      const ctx = canvas.getContext('bitmaprenderer');
      if (ctx) {
        ctx.transferFromImageBitmap(bitmap);
      } else {
        canvas.getContext('2d')!.drawImage(bitmap, 0, 0);
      }
      // NOTE: `toBlob` may yield `null` on encoding failure
      canvas.toBlob(res, outputType);
    });
  } else {
    return steppedScale(img, width, height, 0.5, outputType);
  }
}
// Downscales an image in multiple passes of factor `step` on an offscreen
// canvas before the final draw. Repeated halving produces noticeably better
// quality than a single large canvas downscale in browsers without
// `createImageBitmap` resize quality control.
function steppedScale(
  img: HTMLImageElement, width: number, height: number, step: number = 0.5, outputType: string = 'image/png',
): Promise<Blob | null> {
  const canvas = document.createElement('canvas');
  const ctx = canvas.getContext('2d')!;
  const oc = document.createElement('canvas');
  const octx = oc.getContext('2d')!;
  canvas.width = width;
  canvas.height = height;
  if (img.width * step > width) { // For performance avoid unnecessary drawing
    const mul = 1 / step;
    // Current intermediate size; shrinks by `step` each pass
    let cur = {
      width: Math.floor(img.width * step),
      height: Math.floor(img.height * step),
    };
    oc.width = cur.width;
    oc.height = cur.height;
    // First pass: draw the source image shrunk once
    octx.drawImage(img, 0, 0, cur.width, cur.height);
    while (cur.width * step > width) {
      cur = {
        width: Math.floor(cur.width * step),
        height: Math.floor(cur.height * step),
      };
      // Redraw the canvas onto itself: read the previous (larger) region
      // and write it shrunk by `step` into the top-left corner
      octx.drawImage(oc, 0, 0, cur.width * mul, cur.height * mul, 0, 0, cur.width, cur.height);
    }
    // Final pass: draw the last intermediate region at the target size
    ctx.drawImage(oc, 0, 0, cur.width, cur.height, 0, 0, canvas.width, canvas.height);
  } else {
    // Target is already close to (or larger than) the source — single draw
    ctx.drawImage(img, 0, 0, canvas.width, canvas.height);
  }
  return new Promise((resolve) => {
    canvas.toBlob(resolve, outputType);
  });
}

125
src/util/mediaSession.ts Normal file
View File

@ -0,0 +1,125 @@
// Optional callbacks for Media Session API actions (lock-screen and hardware
// media controls, plus call controls). Keys mirror MediaSessionAction names.
// NOTE(review): `seekTo` is camel-cased here while the spec action name is
// all-lowercase 'seekto' — confirm the registration code maps the key.
export type MediaSessionHandlers = {
  play?: () => void;
  pause?: () => void;
  stop?: () => void;
  previoustrack?: () => void;
  nexttrack?: () => void;
  togglemicrophone?: () => void;
  togglecamera?: () => void;
  hangup?: () => void;
  // Seek handlers receive details (e.g. the requested position) from the browser
  seekbackward?: (details: MediaSessionActionDetails) => void;
  seekforward?: (details: MediaSessionActionDetails) => void;
  seekTo?: ((details: MediaSessionActionDetails) => void);
};
// Plain-object shape accepted by `buildMediaMetadata`, mirroring the
// MediaMetadata constructor init dictionary.
interface MediaMetadataParameters {
  title?: string; // Track title
  artist?: string; // Performer name
  album?: string; // Album / collection name
  artwork?: MediaImage[]; // Cover images, typically in several sizes
}
// Passing `undefined` to `setActionHandler` unregisters an action. Listing every
// action from `MediaSessionHandlers` here lets `setMediaSessionHandlers` and
// `clearMediaSession` reset ALL previously registered handlers.
const DEFAULT_HANDLERS = {
  play: undefined,
  pause: undefined,
  stop: undefined,
  seekbackward: undefined,
  seekforward: undefined,
  previoustrack: undefined,
  nexttrack: undefined,
  seekTo: undefined,
  // Fix: these were missing, so clearing the session left them registered
  togglemicrophone: undefined,
  togglecamera: undefined,
  hangup: undefined,
};
/**
 * Registers metadata and/or action handlers with the Media Session API.
 * Logs a warning when the browser has no Media Session support.
 *
 * @param metadata Optional metadata to expose to platform media UI.
 * @param handlers Optional action handlers to attach.
 */
export function registerMediaSession(metadata?: MediaMetadata, handlers?: MediaSessionHandlers) {
  const { mediaSession } = window.navigator;

  if (!mediaSession) {
    // eslint-disable-next-line no-console
    console.warn('MediaSession API not supported in this browser');
    return;
  }

  if (metadata) {
    updateMetadata(metadata);
  }
  if (handlers) {
    setMediaSessionHandlers(handlers);
  }
}
/**
 * Sets the current Media Session metadata, or clears it when called with no
 * argument. No-ops when the Media Session API is unavailable.
 */
export function updateMetadata(metadata?: MediaMetadata) {
  const { mediaSession } = window.navigator;
  if (!mediaSession) {
    return;
  }
  // The API expects `null` (not `undefined`) to clear metadata
  // eslint-disable-next-line no-null/no-null
  mediaSession.metadata = metadata === undefined ? null : metadata;
}
/**
 * (Re)registers Media Session action handlers. Actions omitted from `handlers`
 * are reset to `undefined` (unregistered) via DEFAULT_HANDLERS.
 * Unsupported actions are silently skipped.
 */
export function setMediaSessionHandlers(handlers: MediaSessionHandlers) {
  const { mediaSession } = window.navigator;
  if (mediaSession) {
    Object.entries({ ...DEFAULT_HANDLERS, ...handlers }).forEach(([key, handler]) => {
      try {
        // Fix: the spec action name is all-lowercase ('seekto'), while our handler
        // key is camel-cased ('seekTo'). Without lowercasing, `setActionHandler`
        // threw a TypeError that the catch below silently swallowed, so the seek
        // handler was never actually registered. Other keys are already lowercase.
        // @ts-ignore API not standardized yet
        mediaSession.setActionHandler(key.toLowerCase(), handler);
      } catch (err) {
        // Handler not supported, ignoring
      }
    });
  }
}
/**
 * Fully resets the Media Session: metadata, action handlers, playback state
 * and position state. No-ops when the API is unavailable.
 */
export function clearMediaSession() {
  const { mediaSession } = window.navigator;
  if (!mediaSession) {
    return;
  }
  // `null` (not `undefined`) is what the API expects for "no metadata"
  // eslint-disable-next-line no-null/no-null
  mediaSession.metadata = null;
  setMediaSessionHandlers(DEFAULT_HANDLERS);
  if (mediaSession.playbackState) {
    mediaSession.playbackState = 'none';
  }
  if (mediaSession.setPositionState) {
    mediaSession.setPositionState(undefined);
  }
}
/**
 * Mirrors the app's playback state into the Media Session so platform media UI
 * stays in sync. No-ops when the API or the property is unavailable.
 */
export function setPlaybackState(state: 'none' | 'paused' | 'playing' = 'none') {
  const { mediaSession } = window.navigator;
  if (!mediaSession || !mediaSession.playbackState) {
    return;
  }
  mediaSession.playbackState = state;
}
/**
 * Reports current position/duration to the Media Session (platform seek bar).
 * No-ops when required fields are missing or the API is unavailable.
 */
export function setPositionState(state?: MediaPositionState) {
  // Fix: the previous `!state.position || !state.duration` check also rejected a
  // valid position of 0 (start of track), so the platform UI received no position
  // state until playback had progressed. Check for presence, not truthiness.
  if (!state || state.position === undefined || state.duration === undefined) return;
  const { mediaSession } = window.navigator;
  if (mediaSession && mediaSession.setPositionState) {
    mediaSession.setPositionState(state);
  }
}
/**
 * Reflects microphone state in the platform Media Session UI (used during calls).
 * No-ops when the API or the experimental method is unavailable.
 */
export function setMicrophoneActive(active: boolean) {
  const { mediaSession } = window.navigator;
  // @ts-ignore typings not updated yet
  if (!mediaSession || !mediaSession.setMicrophoneActive) {
    return;
  }
  // @ts-ignore
  mediaSession.setMicrophoneActive(active);
}
/**
 * Reflects camera state in the platform Media Session UI (used during video calls).
 * No-ops when the API or the experimental method is unavailable.
 */
export function setCameraActive(active: boolean) {
  const { mediaSession } = window.navigator;
  // @ts-ignore typings not updated yet
  if (!mediaSession || !mediaSession.setCameraActive) {
    return;
  }
  // @ts-ignore
  mediaSession.setCameraActive(active);
}
/**
 * Builds a MediaMetadata instance from plain parameters, or returns `undefined`
 * when the constructor is not available in this browser.
 */
export function buildMediaMetadata({
  title, artist, album, artwork,
}: MediaMetadataParameters) {
  if (!('MediaMetadata' in window)) {
    return undefined;
  }
  return new window.MediaMetadata({
    title, artist, album, artwork,
  });
}

View File

@ -10,17 +10,16 @@ export function patchSafariProgressiveAudio(audioEl: HTMLAudioElement) {
audioEl.addEventListener('play', () => {
const t = audioEl.currentTime;
audioEl.dataset.patchForSafariInProgress = 'true';
function onProgress() {
if (!audioEl.buffered.length) {
return;
}
audioEl.dataset.patchForSafariInProgress = 'true';
audioEl.currentTime = audioEl.duration - 1;
audioEl.addEventListener('progress', () => {
delete audioEl.dataset.patchForSafariInProgress;
audioEl.currentTime = t;
if (audioEl.paused) {
if (audioEl.paused && !audioEl.dataset.preventPlayAfterPatch) {
audioEl.play();
}
}, { once: true });