mirror of https://github.com/FranP-code/Baileys.git

Commit: lint: 0 warnings left
@@ -160,6 +160,7 @@ export const addTransactionCapability = (
 let tries = maxCommitRetries
 while(tries) {
 tries -= 1
+//eslint-disable-next-line max-depth
 try {
 await state.set(mutations)
 logger.trace({ dbQueriesInTransaction }, 'committed transaction')
@@ -207,6 +208,7 @@ export const initAuthCreds = (): AuthenticationCreds => {
 accountSettings: {
 unarchiveChats: false
 },
 registered: false,
 pairingCode: undefined,
 lastPropHash: undefined,
 routingInfo: undefined,
@@ -35,7 +35,7 @@ export const captureEventStream = (ev: BaileysEventEmitter, filename: string) =>
 * @param filename filename containing event data
 * @param delayIntervalMs delay between each event emit
 */
-export const readAndEmitEventStream = (filename: string, delayIntervalMs: number = 0) => {
+export const readAndEmitEventStream = (filename: string, delayIntervalMs = 0) => {
 const ev = new EventEmitter() as BaileysEventEmitter

 const fireEvents = async() => {
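For context, TypeScript already infers a parameter's type from its default value, which is presumably what the linter flagged here (the @typescript-eslint/no-inferrable-types pattern); a minimal sketch, not taken from the commit:

// flagged: the annotation repeats what the default already implies
const delayFlagged = (ms: number = 0) => new Promise<void>(resolve => setTimeout(resolve, ms))

// lint-clean: `ms` is still inferred as number
const delayClean = (ms = 0) => new Promise<void>(resolve => setTimeout(resolve, ms))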
@@ -38,7 +38,7 @@ const generateMac = (operation: proto.SyncdMutation.SyncdOperation, data: Buffer
 }

 const buff = Buffer.from([r])
-return Buffer.concat([buff, Buffer.from(keyId as any, 'base64')])
+return Buffer.concat([buff, Buffer.from(keyId as string, 'base64')])
 }

 const keyData = getKeyData()
@@ -144,7 +144,7 @@ export const encodeSyncdPatch = async(
 })
 const encoded = proto.SyncActionData.encode(dataProto).finish()

-const keyValue = mutationKeys(key!.keyData!)
+const keyValue = mutationKeys(key.keyData!)

 const encValue = aesEncrypt(encoded, keyValue.valueEncryptionKey)
 const valueMac = generateMac(operation, encValue, encKeyId, keyValue.valueMacKey)
@@ -196,7 +196,7 @@ export const decodeSyncdMutations = async(
 // indexKey used to HMAC sign record.index.blob
 // valueEncryptionKey used to AES-256-CBC encrypt record.value.blob[0:-32]
 // the remaining record.value.blob[0:-32] is the mac, it the HMAC sign of key.keyId + decoded proto data + length of bytes in keyId
-for(const msgMutation of msgMutations!) {
+for(const msgMutation of msgMutations) {
 // if it's a syncdmutation, get the operation property
 // otherwise, if it's only a record -- it'll be a SET mutation
 const operation = 'operation' in msgMutation ? msgMutation.operation : proto.SyncdMutation.SyncdOperation.SET
@@ -236,7 +236,7 @@ export const decodeSyncdMutations = async(
 return ltGenerator.finish()

 async function getKey(keyId: Uint8Array) {
-const base64Key = Buffer.from(keyId!).toString('base64')
+const base64Key = Buffer.from(keyId).toString('base64')
 const keyEnc = await getAppStateSyncKey(base64Key)
 if(!keyEnc) {
 throw new Boom(`failed to find key "${base64Key}" to decode mutation`, { statusCode: 404, data: { msgMutations } })
@@ -264,19 +264,19 @@ export const decodeSyncdPatch = async(
 const mainKey = mutationKeys(mainKeyObj.keyData!)
 const mutationmacs = msg.mutations!.map(mutation => mutation.record!.value!.blob!.slice(-32))

-const patchMac = generatePatchMac(msg.snapshotMac!, mutationmacs, toNumber(msg.version!.version!), name, mainKey.patchMacKey)
+const patchMac = generatePatchMac(msg.snapshotMac!, mutationmacs, toNumber(msg.version!.version), name, mainKey.patchMacKey)
 if(Buffer.compare(patchMac, msg.patchMac!) !== 0) {
 throw new Boom('Invalid patch mac')
 }
 }

-const result = await decodeSyncdMutations(msg!.mutations!, initialState, getAppStateSyncKey, onMutation, validateMacs)
+const result = await decodeSyncdMutations(msg.mutations!, initialState, getAppStateSyncKey, onMutation, validateMacs)
 return result
 }

 export const extractSyncdPatches = async(
 result: BinaryNode,
-options: AxiosRequestConfig<any>
+options: AxiosRequestConfig<{}>
 ) => {
 const syncNode = getBinaryNodeChild(result, 'sync')
 const collectionNodes = getBinaryNodeChildren(syncNode, 'collection')
@@ -302,7 +302,7 @@ export const extractSyncdPatches = async(
 }

 const blobRef = proto.ExternalBlobReference.decode(
-snapshotNode.content! as Buffer
+snapshotNode.content as Buffer
 )
 const data = await downloadExternalBlob(blobRef, options)
 snapshot = proto.SyncdSnapshot.decode(data)
@@ -314,7 +314,7 @@ export const extractSyncdPatches = async(
 content = Buffer.from(Object.values(content))
 }

-const syncd = proto.SyncdPatch.decode(content! as Uint8Array)
+const syncd = proto.SyncdPatch.decode(content as Uint8Array)
 if(!syncd.version) {
 syncd.version = { version: +collectionNode.attrs.version + 1 }
 }
@@ -334,7 +334,7 @@ export const extractSyncdPatches = async(

 export const downloadExternalBlob = async(
 blob: proto.IExternalBlobReference,
-options: AxiosRequestConfig<any>
+options: AxiosRequestConfig<{}>
 ) => {
 const stream = await downloadContentFromMessage(blob, 'md-app-state', { options })
 const bufferArray: Buffer[] = []
@@ -347,7 +347,7 @@ export const downloadExternalBlob = async(

 export const downloadExternalPatch = async(
 blob: proto.IExternalBlobReference,
-options: AxiosRequestConfig<any>
+options: AxiosRequestConfig<{}>
 ) => {
 const buffer = await downloadExternalBlob(blob, options)
 const syncData = proto.SyncdMutations.decode(buffer)
@@ -359,10 +359,10 @@ export const decodeSyncdSnapshot = async(
 snapshot: proto.ISyncdSnapshot,
 getAppStateSyncKey: FetchAppStateSyncKey,
 minimumVersionNumber: number | undefined,
-validateMacs: boolean = true
+validateMacs = true
 ) => {
 const newState = newLTHashState()
-newState.version = toNumber(snapshot.version!.version!)
+newState.version = toNumber(snapshot.version!.version)

 const mutationMap: ChatMutationMap = {}
 const areMutationsRequired = typeof minimumVersionNumber === 'undefined'
@@ -408,10 +408,10 @@ export const decodePatches = async(
 syncds: proto.ISyncdPatch[],
 initial: LTHashState,
 getAppStateSyncKey: FetchAppStateSyncKey,
-options: AxiosRequestConfig<any>,
+options: AxiosRequestConfig<{}>,
 minimumVersionNumber?: number,
 logger?: Logger,
-validateMacs: boolean = true
+validateMacs = true
 ) => {
 const newState: LTHashState = {
 ...initial,
@@ -420,8 +420,7 @@ export const decodePatches = async(

 const mutationMap: ChatMutationMap = {}

-for(let i = 0; i < syncds.length; i++) {
-const syncd = syncds[i]
+for(const syncd of syncds) {
 const { version, keyId, snapshotMac } = syncd
 if(syncd.externalMutations) {
 logger?.trace({ name, version }, 'downloading external patch')
@@ -430,7 +429,7 @@ export const decodePatches = async(
 syncd.mutations?.push(...ref.mutations)
 }

-const patchVersion = toNumber(version!.version!)
+const patchVersion = toNumber(version!.version)

 newState.version = patchVersion
 const shouldMutate = typeof minimumVersionNumber === 'undefined' || patchVersion > minimumVersionNumber
@@ -736,7 +735,7 @@ export const processSyncAction = (
 {
 id,
 muteEndTime: action.muteAction?.muted
-? toNumber(action.muteAction!.muteEndTimestamp!)
+? toNumber(action.muteAction.muteEndTimestamp)
 : null,
 conditional: getChatUpdateConditional(id, undefined)
 }
@@ -794,7 +793,7 @@ export const processSyncAction = (
 ]
 })
 } else if(action?.contactAction) {
-ev.emit('contacts.upsert', [{ id, name: action.contactAction!.fullName! }])
+ev.emit('contacts.upsert', [{ id, name: action.contactAction.fullName! }])
 } else if(action?.pushNameSetting) {
 const name = action?.pushNameSetting?.name
 if(name && me?.name !== name) {
@@ -803,7 +802,7 @@ export const processSyncAction = (
 } else if(action?.pinAction) {
 ev.emit('chats.update', [{
 id,
-pinned: action.pinAction?.pinned ? toNumber(action.timestamp!) : null,
+pinned: action.pinAction?.pinned ? toNumber(action.timestamp) : null,
 conditional: getChatUpdateConditional(id, undefined)
 }])
 } else if(action?.unarchiveChatsSetting) {
@@ -831,7 +830,7 @@ export const processSyncAction = (
 ev.emit('chats.delete', [id])
 }
 } else if(action?.labelEditAction) {
-const { name, color, deleted, predefinedId } = action.labelEditAction!
+const { name, color, deleted, predefinedId } = action.labelEditAction

 ev.emit('labels.edit', {
 id,
@@ -177,6 +177,7 @@ export const decryptMessageNode = (
 let msg: proto.IMessage = proto.Message.decode(e2eType !== 'plaintext' ? unpadRandomMax16(msgBuffer) : msgBuffer)
 msg = msg.deviceSentMessage?.message || msg
 if(msg.senderKeyDistributionMessage) {
+//eslint-disable-next-line max-depth
 try {
 await repository.processSenderKeyDistributionMessage({
 authorJid: author,
@@ -42,6 +42,7 @@ type BaileysBufferableEventEmitter = BaileysEventEmitter & {
 * */
 buffer(): void
 /** buffers all events till the promise completes */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
 createBufferedFunction<A extends any[], T>(work: (...args: A) => Promise<T>): ((...args: A) => Promise<T>)
 /**
 * flushes all buffered events
@@ -132,7 +133,7 @@ export const makeEventBuffer = (logger: Logger): BaileysBufferableEventEmitter =
 },
 emit<T extends BaileysEvent>(event: BaileysEvent, evData: BaileysEventMap[T]) {
 if(buffersInProgress && BUFFERABLE_EVENT_SET.has(event)) {
-append(data, historyCache, event as any, evData, logger)
+append(data, historyCache, event as BufferableEvent, evData, logger)
 return true
 }
@@ -187,6 +188,7 @@ function append<E extends BufferableEvent>(
 data: BufferedEventData,
 historyCache: Set<string>,
 event: E,
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
 eventData: any,
 logger: Logger
 ) {
@@ -331,7 +333,7 @@ function append<E extends BufferableEvent>(
 }

 if(data.contactUpdates[contact.id]) {
-upsert = Object.assign(data.contactUpdates[contact.id], trimUndefined(contact))
+upsert = Object.assign(data.contactUpdates[contact.id], trimUndefined(contact)) as Contact
 delete data.contactUpdates[contact.id]
 }
 }
@@ -598,12 +600,10 @@ function consolidateEvents(data: BufferedEventData) {
 }

 function concatChats<C extends Partial<Chat>>(a: C, b: Partial<Chat>) {
-if(b.unreadCount === null) {
-// neutralize unread counter
-if(a.unreadCount! < 0) {
-a.unreadCount = undefined
-b.unreadCount = undefined
-}
+if(b.unreadCount === null && // neutralize unread counter
+a.unreadCount! < 0) {
+a.unreadCount = undefined
+b.unreadCount = undefined
 }

 if(typeof a.unreadCount === 'number' && typeof b.unreadCount === 'number') {
@@ -5,7 +5,7 @@ import { platform, release } from 'os'
 import { Logger } from 'pino'
 import { proto } from '../../WAProto'
 import { version as baileysVersion } from '../Defaults/baileys-version.json'
-import { BaileysEventEmitter, BaileysEventMap, BrowsersMap, DisconnectReason, WACallUpdateType, WAVersion } from '../Types'
+import { BaileysEventEmitter, BaileysEventMap, BrowsersMap, ConnectionState, DisconnectReason, WACallUpdateType, WAVersion } from '../Types'
 import { BinaryNode, getAllBinaryNodeChildren, jidDecode } from '../WABinary'

 const PLATFORM_MAP = {
@@ -33,6 +33,7 @@ export const getPlatformId = (browser: string) => {
 }

 export const BufferJSON = {
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
 replacer: (k, value: any) => {
 if(Buffer.isBuffer(value) || value instanceof Uint8Array || value?.type === 'Buffer') {
 return { type: 'Buffer', data: Buffer.from(value?.data || value).toString('base64') }
@@ -40,6 +41,8 @@ export const BufferJSON = {

 return value
 },

+// eslint-disable-next-line @typescript-eslint/no-explicit-any
 reviver: (_, value: any) => {
 if(typeof value === 'object' && !!value && (value.buffer === true || value.type === 'Buffer')) {
 const val = value.data || value.value
@@ -52,7 +55,7 @@ export const BufferJSON = {

 export const getKeyAuthor = (
 key: proto.IMessageKey | undefined | null,
-meId: string = 'me'
+meId = 'me'
 ) => (
 (key?.fromMe ? meId : key?.participant || key?.remoteJid) || ''
 )
@@ -102,14 +105,14 @@ export const encodeBigEndian = (e: number, t = 4) => {
 return a
 }

-export const toNumber = (t: Long | number | null | undefined): number => ((typeof t === 'object' && t) ? ('toNumber' in t ? t.toNumber() : (t as any).low) : t)
+export const toNumber = (t: Long | number | null | undefined): number => ((typeof t === 'object' && t) ? ('toNumber' in t ? t.toNumber() : (t as Long).low) : t || 0)

 /** unix timestamp of a date in seconds */
 export const unixTimestampSeconds = (date: Date = new Date()) => Math.floor(date.getTime() / 1000)

 export type DebouncedTimeout = ReturnType<typeof debouncedTimeout>

-export const debouncedTimeout = (intervalMs: number = 1000, task?: () => void) => {
+export const debouncedTimeout = (intervalMs = 1000, task?: () => void) => {
 let timeout: NodeJS.Timeout | undefined
 return {
 start: (newIntervalMs?: number, newTask?: () => void) => {
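Worth noting for the toNumber change: the cast narrows from `any` to `Long`, and the `t || 0` fallback makes the declared `number` return type honest for null/undefined input, which is what lets other hunks in this commit drop non-null assertions such as `version!.version!`. A rough usage sketch, assuming the `long` package that protobufjs relies on:

import Long from 'long'

toNumber(Long.fromNumber(42)) // 42, via Long.toNumber()
toNumber(7)                   // 7, passed through
toNumber(undefined)           // 0 now, rather than undefined leaking through a `number` return type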
@@ -184,9 +187,9 @@ export const generateMessageIDV2 = (userId?: string): string => {
 const data = Buffer.alloc(8 + 20 + 16)
 data.writeBigUInt64BE(BigInt(Math.floor(Date.now() / 1000)))

-if (userId) {
+if(userId) {
 const id = jidDecode(userId)
-if (id?.user) {
+if(id?.user) {
 data.write(id.user, 8)
 data.write('@c.us', 8 + id.user.length)
 }
@@ -205,7 +208,7 @@ export const generateMessageID = () => '3EB0' + randomBytes(18).toString('hex').
 export function bindWaitForEvent<T extends keyof BaileysEventMap>(ev: BaileysEventEmitter, event: T) {
 return async(check: (u: BaileysEventMap[T]) => boolean | undefined, timeoutMs?: number) => {
 let listener: (item: BaileysEventMap[T]) => void
-let closeListener: any
+let closeListener: (state: Partial<ConnectionState>) => void
 await (
 promiseTimeout<void>(
 timeoutMs,
@@ -256,7 +259,7 @@ export const printQRIfNecessaryListener = (ev: BaileysEventEmitter, logger: Logg
 * utility that fetches latest baileys version from the master branch.
 * Use to ensure your WA connection is always on the latest version
 */
-export const fetchLatestBaileysVersion = async(options: AxiosRequestConfig<any> = { }) => {
+export const fetchLatestBaileysVersion = async(options: AxiosRequestConfig<{}> = { }) => {
 const URL = 'https://raw.githubusercontent.com/WhiskeySockets/Baileys/master/src/Defaults/baileys-version.json'
 try {
 const result = await axios.get<{ version: WAVersion }>(
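The AxiosRequestConfig generic describes the request body (`config.data`); since these helpers issue GET requests with no body, `{}` is a tighter argument than `any`. A small sketch of the same pattern, with a hypothetical helper name:

import axios, { AxiosRequestConfig } from 'axios'

// `{}` types config.data for a body-less request without falling back to `any`
const fetchJson = async(url: string, options: AxiosRequestConfig<{}> = { }) => {
    const result = await axios.get(url, options)
    return result.data
}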
@@ -283,7 +286,7 @@ export const fetchLatestBaileysVersion = async(options: AxiosRequestConfig<any>
 * A utility that fetches the latest web version of whatsapp.
 * Use to ensure your WA connection is always on the latest version
 */
-export const fetchLatestWaWebVersion = async(options: AxiosRequestConfig<any>) => {
+export const fetchLatestWaWebVersion = async(options: AxiosRequestConfig<{}>) => {
 try {
 const result = await axios.get(
 'https://web.whatsapp.com/check-update?version=1&platform=web',
@@ -393,6 +396,7 @@ export const getCodeFromWSError = (error: Error) => {
 statusCode = code
 }
 } else if(
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
 (error as any)?.code?.startsWith('E')
 || error?.message?.includes('timed out')
 ) { // handle ETIMEOUT, ENOTFOUND etc
@@ -410,7 +414,8 @@ export const isWABusinessPlatform = (platform: string) => {
 return platform === 'smbi' || platform === 'smba'
 }

-export function trimUndefined(obj: any) {
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+export function trimUndefined(obj: {[_: string]: any}) {
 for(const key in obj) {
 if(typeof obj[key] === 'undefined') {
 delete obj[key]
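The index-signature parameter keeps the for...in and delete accesses type-checked while still accepting arbitrary objects; a comparable sketch using unknown values (a hypothetical variant, not the commit's exact typing):

function dropUndefined(obj: { [key: string]: unknown }) {
    for(const key in obj) {
        if(typeof obj[key] === 'undefined') {
            delete obj[key]
        }
    }
    return obj
}

dropUndefined({ a: 1, b: undefined }) // { a: 1 }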
@@ -427,8 +432,8 @@ export function bytesToCrockford(buffer: Buffer): string {
 let bitCount = 0
 const crockford: string[] = []

-for(let i = 0; i < buffer.length; i++) {
-value = (value << 8) | (buffer[i] & 0xff)
+for(const element of buffer) {
+value = (value << 8) | (element & 0xff)
 bitCount += 8

 while(bitCount >= 5) {
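The rewrite relies on Buffer being iterable over its byte values, so the for...of form visits exactly what `buffer[i]` did; a quick check:

const buf = Buffer.from([0x1f, 0x80])

const viaIndex: number[] = []
for(let i = 0; i < buf.length; i++) {
    viaIndex.push(buf[i] & 0xff)
}

const viaForOf: number[] = []
for(const element of buf) {
    viaForOf.push(element & 0xff)
}
// both arrays are [31, 128]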
@@ -12,7 +12,7 @@ const inflatePromise = promisify(inflate)

 export const downloadHistory = async(
 msg: proto.Message.IHistorySyncNotification,
-options: AxiosRequestConfig<any>
+options: AxiosRequestConfig<{}>
 ) => {
 const stream = await downloadContentFromMessage(msg, 'md-msg-hist', { options })
 const bufferArray: Buffer[] = []
@@ -101,7 +101,7 @@ export const processHistoryMessage = (item: proto.IHistorySync) => {

 export const downloadAndProcessHistorySyncNotification = async(
 msg: proto.Message.IHistorySyncNotification,
-options: AxiosRequestConfig<any>
+options: AxiosRequestConfig<{}>
 ) => {
 const historyMsg = await downloadHistory(msg, options)
 return processHistoryMessage(historyMsg)
@@ -1,4 +1,5 @@
 export const makeMutex = () => {
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
 let task = Promise.resolve() as Promise<any>

 let taskTimeout: NodeJS.Timeout | undefined
@@ -12,7 +12,7 @@ import { Readable, Transform } from 'stream'
 import { URL } from 'url'
 import { proto } from '../../WAProto'
 import { DEFAULT_ORIGIN, MEDIA_HKDF_KEY_MAPPING, MEDIA_PATH_MAP } from '../Defaults'
-import { BaileysEventMap, DownloadableMessage, MediaConnInfo, MediaDecryptionKeyInfo, MediaType, MessageType, SocketConfig, WAGenericMediaMessage, WAMediaUpload, WAMediaUploadFunction, WAMessageContent } from '../Types'
+import { BaileysEventMap, DownloadableMessage, MediaConnInfo, MediaDecryptionKeyInfo, MediaType, MessageType, SocketConfig, WAGenericMediaMessage, WAMediaPayloadURL, WAMediaUpload, WAMediaUploadFunction, WAMessageContent } from '../Types'
 import { BinaryNode, getBinaryNodeChild, getBinaryNodeChildBuffer, jidNormalizedUser } from '../WABinary'
 import { aesDecryptGCM, aesEncryptGCM, hkdf } from './crypto'
 import { generateMessageID } from './generics'
@@ -79,7 +79,7 @@ const extractVideoThumb = async(
 destPath: string,
 time: string,
 size: { width: number, height: number },
-) => new Promise((resolve, reject) => {
+) => new Promise<void>((resolve, reject) => {
 const cmd = `ffmpeg -ss ${time} -i ${path} -y -vf scale=${size.width}:-1 -vframes 1 -f image2 ${destPath}`
 exec(cmd, (err) => {
 if(err) {
@@ -88,7 +88,7 @@ const extractVideoThumb = async(
 resolve()
 }
 })
-}) as Promise<void>
+})

 export const extractImageThumb = async(bufferOrFilePath: Readable | Buffer | string, width = 32) => {
 if(bufferOrFilePath instanceof Readable) {
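Supplying the type argument to the Promise constructor lets TypeScript check the executor itself (a bare `resolve()` type-checks because T is void), whereas the old `as Promise<void>` cast only asserted the type after the fact; a minimal sketch:

const waitABit = () => new Promise<void>((resolve) => {
    setTimeout(() => resolve(), 10)
})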
@@ -97,7 +97,7 @@ export const extractImageThumb = async(bufferOrFilePath: Readable | Buffer | str

 const lib = await getImageProcessingLibrary()
 if('sharp' in lib && typeof lib.sharp?.default === 'function') {
-const img = lib.sharp!.default(bufferOrFilePath)
+const img = lib.sharp.default(bufferOrFilePath)
 const dimensions = await img.metadata()

 const buffer = await img
@@ -114,7 +114,7 @@ export const extractImageThumb = async(bufferOrFilePath: Readable | Buffer | str
 } else if('jimp' in lib && typeof lib.jimp?.read === 'function') {
 const { read, MIME_JPEG, RESIZE_BILINEAR, AUTO } = lib.jimp

-const jimp = await read(bufferOrFilePath as any)
+const jimp = await read(bufferOrFilePath as string)
 const dimensions = {
 width: jimp.getWidth(),
 height: jimp.getHeight()
@@ -154,7 +154,7 @@ export const generateProfilePicture = async(mediaUpload: WAMediaUpload) => {
 const lib = await getImageProcessingLibrary()
 let img: Promise<Buffer>
 if('sharp' in lib && typeof lib.sharp?.default === 'function') {
-img = lib.sharp!.default(bufferOrFilePath)
+img = lib.sharp.default(bufferOrFilePath)
 .resize(640, 640)
 .jpeg({
 quality: 50,
@@ -162,7 +162,7 @@ export const generateProfilePicture = async(mediaUpload: WAMediaUpload) => {
 .toBuffer()
 } else if('jimp' in lib && typeof lib.jimp?.read === 'function') {
 const { read, MIME_JPEG, RESIZE_BILINEAR } = lib.jimp
-const jimp = await read(bufferOrFilePath as any)
+const jimp = await read(bufferOrFilePath as string)
 const min = Math.min(jimp.getWidth(), jimp.getHeight())
 const cropped = jimp.crop(0, 0, min, min)
@@ -351,7 +351,7 @@ export const encryptedStream = async(
 let writeStream: WriteStream | undefined
 let didSaveToTmpPath = false
 if(type === 'file') {
-bodyPath = (media as any).url
+bodyPath = (media as WAMediaPayloadURL).url.toString()
 } else if(saveOriginalFileIfRequired) {
 bodyPath = join(getTmpFilesDirectory(), mediaType + generateMessageID())
 writeStream = createWriteStream(bodyPath)
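Casting to the specific URL-carrying member of the media-upload union (rather than `any`) keeps the `.url` access type-checked, and `.toString()` normalises a URL object into the string the write path expects. A rough sketch, assuming the payload shape is roughly `{ url: URL | string }`:

type MediaPayloadWithUrl = { url: URL | string } // stand-in for WAMediaPayloadURL

const resolveBodyPath = (media: Buffer | MediaPayloadWithUrl): string | undefined =>
    Buffer.isBuffer(media) ? undefined : (media as MediaPayloadWithUrl).url.toString()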
@@ -382,10 +382,8 @@ export const encryptedStream = async(
 }

 sha256Plain = sha256Plain.update(data)
-if(writeStream) {
-if(!writeStream.write(data)) {
-await once(writeStream, 'drain')
-}
+if(writeStream && !writeStream.write(data)) {
+await once(writeStream, 'drain')
+}

 onChunk(aes.update(data))
@@ -455,7 +453,7 @@ const toSmallestChunkSize = (num: number) => {
 export type MediaDownloadOptions = {
 startByte?: number
 endByte?: number
-options?: AxiosRequestConfig<any>
+options?: AxiosRequestConfig<{}>
 }

 export const getUrlFromDirectPath = (directPath: string) => `https://${DEF_HOST}${directPath}`
@@ -501,9 +499,9 @@ export const downloadEncryptedContent = async(
 Origin: DEFAULT_ORIGIN,
 }
 if(startChunk || endChunk) {
-headers!.Range = `bytes=${startChunk}-`
+headers.Range = `bytes=${startChunk}-`
 if(endChunk) {
-headers!.Range += endChunk
+headers.Range += endChunk
 }
 }
@@ -614,6 +612,7 @@ export const getWAUploadToServer = (

 const auth = encodeURIComponent(uploadInfo.auth) // the auth token
 const url = `https://${hostname}${MEDIA_PATH_MAP[mediaType]}/${fileEncSha256B64}?auth=${auth}&token=${fileEncSha256B64}`
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
 let result: any
 try {
@@ -770,9 +769,4 @@ const MEDIA_RETRY_STATUS_MAP = {
 [proto.MediaRetryNotification.ResultType.DECRYPTION_ERROR]: 412,
 [proto.MediaRetryNotification.ResultType.NOT_FOUND]: 404,
 [proto.MediaRetryNotification.ResultType.GENERAL_ERROR]: 418,
-} as const
-
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-function __importStar(arg0: any): any {
-throw new Error('Function not implemented.')
-}
+} as const
@@ -488,7 +488,7 @@ export const generateWAMessageContent = async(
 options: message.poll.values.map(optionName => ({ optionName })),
 }

-if (message.poll.toAnnouncementGroup) {
+if(message.poll.toAnnouncementGroup) {
 // poll v2 is for community announcement groups (single select and multiple)
 m.pollCreationMessageV2 = pollCreationMessage
 } else {
@@ -859,17 +859,13 @@ export const downloadMediaMessage = async<Type extends 'buffer' | 'stream'>(
 ) => {
 const result = await downloadMsg()
 .catch(async(error) => {
-if(ctx) {
-if(axios.isAxiosError(error)) {
-// check if the message requires a reupload
-if(REUPLOAD_REQUIRED_STATUS.includes(error.response?.status!)) {
-ctx.logger.info({ key: message.key }, 'sending reupload media request...')
-// request reupload
-message = await ctx.reuploadRequest(message)
-const result = await downloadMsg()
-return result
-}
-}
+if(ctx && axios.isAxiosError(error) && // check if the message requires a reupload
+REUPLOAD_REQUIRED_STATUS.includes(error.response?.status!)) {
+ctx.logger.info({ key: message.key }, 'sending reupload media request...')
+// request reupload
+message = await ctx.reuploadRequest(message)
+const result = await downloadMsg()
+return result
+}

 throw error
@@ -283,6 +283,7 @@ const processMessage = async(
 const { peerDataOperationResult } = response
 for(const result of peerDataOperationResult!) {
 const { placeholderMessageResendResponse: retryResponse } = result
+//eslint-disable-next-line max-depth
 if(retryResponse) {
 const webMessageInfo = proto.WebMessageInfo.decode(retryResponse.webMessageInfoBytes!)
 // wait till another upsert event is available, don't want it to be part of the PDO response message
@@ -73,7 +73,7 @@ export const parseAndInjectE2ESessions = async(
 const extractKey = (key: BinaryNode) => (
 key ? ({
 keyId: getBinaryNodeChildUInt(key, 'id', 3)!,
-publicKey: generateSignalPubKey(getBinaryNodeChildBuffer(key, 'value')!)!,
+publicKey: generateSignalPubKey(getBinaryNodeChildBuffer(key, 'value')!),
 signature: getBinaryNodeChildBuffer(key, 'signature')!,
 }) : undefined
 )
@@ -125,8 +125,10 @@ export const extractDeviceJids = (result: BinaryNode, myJid: string, excludeZero
 const devicesNode = getBinaryNodeChild(item, 'devices')
 const deviceListNode = getBinaryNodeChild(devicesNode, 'device-list')
 if(Array.isArray(deviceListNode?.content)) {
+//eslint-disable-next-line max-depth
 for(const { tag, attrs } of deviceListNode!.content) {
 const device = +attrs.id
+//eslint-disable-next-line max-depth
 if(
 tag === 'device' && // ensure the "device" tag
 (!excludeZeroDevices || device !== 0) && // if zero devices are not-excluded, or device is non zero
@@ -21,7 +21,7 @@ const fileLock = new AsyncLock({ maxPending: Infinity })
 * Would recommend writing an auth state for use with a proper SQL or No-SQL DB
 * */
 export const useMultiFileAuthState = async(folder: string): Promise<{ state: AuthenticationState, saveCreds: () => Promise<void> }> => {
-
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
 const writeData = (data: any, file: string) => {
 const filePath = join(folder, fixFileName(file)!)
 return fileLock.acquire(
Reference in New Issue
Block a user