mirror of https://github.com/FranP-code/Baileys.git, synced 2025-10-13 00:32:22 +00:00
chore: format everything
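The diff below is the output of running a code formatter across the repository. The repository's actual formatter configuration is not part of this commit; judging purely from the visible changes (spaces added after `if`/`for`/`while`/`catch`, `{ }` collapsed to `{}`, trailing commas removed, `async()` becoming `async ()`, parentheses dropped around single arrow parameters, and long signatures wrapped at roughly 120 columns), the rules are approximately equivalent to a Prettier config like this hypothetical sketch:

```js
// prettier.config.js — a hypothetical reconstruction inferred from the diff below,
// not the repository's actual config file
module.exports = {
	printWidth: 120, // long expressions stay on one line up to ~120 columns
	semi: false, // no statement-terminating semicolons appear anywhere in the diff
	singleQuote: true, // all string literals use single quotes
	trailingComma: 'none', // trailing commas are consistently removed
	arrowParens: 'avoid' // `(data) => ...` is rewritten to `data => ...`
}
```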
@@ -1,7 +1,15 @@
 import NodeCache from '@cacheable/node-cache'
 import { randomBytes } from 'crypto'
 import { DEFAULT_CACHE_TTLS } from '../Defaults'
-import type { AuthenticationCreds, CacheStore, SignalDataSet, SignalDataTypeMap, SignalKeyStore, SignalKeyStoreWithTransaction, TransactionCapabilityOptions } from '../Types'
+import type {
+AuthenticationCreds,
+CacheStore,
+SignalDataSet,
+SignalDataTypeMap,
+SignalKeyStore,
+SignalKeyStoreWithTransaction,
+TransactionCapabilityOptions
+} from '../Types'
 import { Curve, signedKeyPair } from './crypto'
 import { delay, generateRegistrationId } from './generics'
 import { ILogger } from './logger'
@@ -17,11 +25,13 @@ export function makeCacheableSignalKeyStore(
 logger?: ILogger,
 _cache?: CacheStore
 ): SignalKeyStore {
-const cache = _cache || new NodeCache({
-stdTTL: DEFAULT_CACHE_TTLS.SIGNAL_STORE, // 5 minutes
-useClones: false,
-deleteOnExpire: true,
-})
+const cache =
+_cache ||
+new NodeCache({
+stdTTL: DEFAULT_CACHE_TTLS.SIGNAL_STORE, // 5 minutes
+useClones: false,
+deleteOnExpire: true
+})
 
 function getUniqueId(type: string, id: string) {
 return `${type}.${id}`
@@ -29,23 +39,23 @@ export function makeCacheableSignalKeyStore(
 
 return {
 async get(type, ids) {
-const data: { [_: string]: SignalDataTypeMap[typeof type] } = { }
+const data: { [_: string]: SignalDataTypeMap[typeof type] } = {}
 const idsToFetch: string[] = []
-for(const id of ids) {
+for (const id of ids) {
 const item = cache.get<SignalDataTypeMap[typeof type]>(getUniqueId(type, id))
-if(typeof item !== 'undefined') {
+if (typeof item !== 'undefined') {
 data[id] = item
 } else {
 idsToFetch.push(id)
 }
 }
 
-if(idsToFetch.length) {
+if (idsToFetch.length) {
 logger?.trace({ items: idsToFetch.length }, 'loading from store')
 const fetched = await store.get(type, idsToFetch)
-for(const id of idsToFetch) {
+for (const id of idsToFetch) {
 const item = fetched[id]
-if(item) {
+if (item) {
 data[id] = item
 cache.set(getUniqueId(type, id), item)
 }
@@ -56,8 +66,8 @@ export function makeCacheableSignalKeyStore(
 },
 async set(data) {
 let keys = 0
-for(const type in data) {
-for(const id in data[type]) {
+for (const type in data) {
+for (const id in data[type]) {
 cache.set(getUniqueId(type, id), data[type][id])
 keys += 1
 }
@@ -89,52 +99,45 @@ export const addTransactionCapability = (
 // number of queries made to the DB during the transaction
 // only there for logging purposes
 let dbQueriesInTransaction = 0
-let transactionCache: SignalDataSet = { }
-let mutations: SignalDataSet = { }
+let transactionCache: SignalDataSet = {}
+let mutations: SignalDataSet = {}
 
 let transactionsInProgress = 0
 
 return {
-get: async(type, ids) => {
-if(isInTransaction()) {
+get: async (type, ids) => {
+if (isInTransaction()) {
 const dict = transactionCache[type]
-const idsRequiringFetch = dict
-? ids.filter(item => typeof dict[item] === 'undefined')
-: ids
+const idsRequiringFetch = dict ? ids.filter(item => typeof dict[item] === 'undefined') : ids
 // only fetch if there are any items to fetch
-if(idsRequiringFetch.length) {
+if (idsRequiringFetch.length) {
 dbQueriesInTransaction += 1
 const result = await state.get(type, idsRequiringFetch)
 
 transactionCache[type] ||= {}
-Object.assign(
-transactionCache[type]!,
-result
-)
+Object.assign(transactionCache[type]!, result)
 }
 
-return ids.reduce(
-(dict, id) => {
-const value = transactionCache[type]?.[id]
-if(value) {
-dict[id] = value
-}
+return ids.reduce((dict, id) => {
+const value = transactionCache[type]?.[id]
+if (value) {
+dict[id] = value
+}
 
-return dict
-}, { }
-)
+return dict
+}, {})
 } else {
 return state.get(type, ids)
 }
 },
 set: data => {
-if(isInTransaction()) {
+if (isInTransaction()) {
 logger.trace({ types: Object.keys(data) }, 'caching in transaction')
-for(const key in data) {
-transactionCache[key] = transactionCache[key] || { }
+for (const key in data) {
+transactionCache[key] = transactionCache[key] || {}
 Object.assign(transactionCache[key], data[key])
 
-mutations[key] = mutations[key] || { }
+mutations[key] = mutations[key] || {}
 Object.assign(mutations[key], data[key])
 }
 } else {
@@ -145,27 +148,27 @@ export const addTransactionCapability = (
 async transaction(work) {
 let result: Awaited<ReturnType<typeof work>>
 transactionsInProgress += 1
-if(transactionsInProgress === 1) {
+if (transactionsInProgress === 1) {
 logger.trace('entering transaction')
 }
 
 try {
 result = await work()
 // commit if this is the outermost transaction
-if(transactionsInProgress === 1) {
-if(Object.keys(mutations).length) {
+if (transactionsInProgress === 1) {
+if (Object.keys(mutations).length) {
 logger.trace('committing transaction')
 // retry mechanism to ensure we've some recovery
 // in case a transaction fails in the first attempt
 let tries = maxCommitRetries
-while(tries) {
+while (tries) {
 tries -= 1
 //eslint-disable-next-line max-depth
 try {
 await state.set(mutations)
 logger.trace({ dbQueriesInTransaction }, 'committed transaction')
 break
-} catch(error) {
+} catch (error) {
 logger.warn(`failed to commit ${Object.keys(mutations).length} mutations, tries left=${tries}`)
 await delay(delayBetweenTriesMs)
 }
@@ -176,9 +179,9 @@ export const addTransactionCapability = (
 }
 } finally {
 transactionsInProgress -= 1
-if(transactionsInProgress === 0) {
-transactionCache = { }
-mutations = { }
+if (transactionsInProgress === 0) {
+transactionCache = {}
+mutations = {}
 dbQueriesInTransaction = 0
 }
 }
@@ -211,6 +214,6 @@ export const initAuthCreds = (): AuthenticationCreds => {
 registered: false,
 pairingCode: undefined,
 lastPropHash: undefined,
-routingInfo: undefined,
+routingInfo: undefined
 }
 }

@@ -16,15 +16,13 @@ export const captureEventStream = (ev: BaileysEventEmitter, filename: string) =>
 // write mutex so data is appended in order
 const writeMutex = makeMutex()
 // monkey patch eventemitter to capture all events
-ev.emit = function(...args: any[]) {
+ev.emit = function (...args: any[]) {
 const content = JSON.stringify({ timestamp: Date.now(), event: args[0], data: args[1] }) + '\n'
 const result = oldEmit.apply(ev, args)
 
-writeMutex.mutex(
-async() => {
-await writeFile(filename, content, { flag: 'a' })
-}
-)
+writeMutex.mutex(async () => {
+await writeFile(filename, content, { flag: 'a' })
+})
 
 return result
 }
@@ -38,7 +36,7 @@ export const captureEventStream = (ev: BaileysEventEmitter, filename: string) =>
 export const readAndEmitEventStream = (filename: string, delayIntervalMs = 0) => {
 const ev = new EventEmitter() as BaileysEventEmitter
 
-const fireEvents = async() => {
+const fireEvents = async () => {
 // from: https://stackoverflow.com/questions/6156501/read-a-file-one-line-at-a-time-in-node-js
 const fileStream = createReadStream(filename)
 
@@ -49,10 +47,10 @@ export const readAndEmitEventStream = (filename: string, delayIntervalMs = 0) =>
 // Note: we use the crlfDelay option to recognize all instances of CR LF
 // ('\r\n') in input.txt as a single line break.
 for await (const line of rl) {
-if(line) {
+if (line) {
 const { event, data } = JSON.parse(line)
 ev.emit(event, data)
-delayIntervalMs && await delay(delayIntervalMs)
+delayIntervalMs && (await delay(delayIntervalMs))
 }
 }
 
@@ -63,4 +61,4 @@ export const readAndEmitEventStream = (filename: string, delayIntervalMs = 0) =>
 ev,
 task: fireEvents()
-}
+}
 }
 }

@@ -1,6 +1,16 @@
 import { Boom } from '@hapi/boom'
 import { createHash } from 'crypto'
-import { CatalogCollection, CatalogStatus, OrderDetails, OrderProduct, Product, ProductCreate, ProductUpdate, WAMediaUpload, WAMediaUploadFunction } from '../Types'
+import {
+CatalogCollection,
+CatalogStatus,
+OrderDetails,
+OrderProduct,
+Product,
+ProductCreate,
+ProductUpdate,
+WAMediaUpload,
+WAMediaUploadFunction
+} from '../Types'
 import { BinaryNode, getBinaryNodeChild, getBinaryNodeChildren, getBinaryNodeChildString } from '../WABinary'
 import { getStream, getUrlFromDirectPath, toReadable } from './messages-media'
 
@@ -11,28 +21,24 @@ export const parseCatalogNode = (node: BinaryNode) => {
 
 return {
 products,
-nextPageCursor: paging
-? getBinaryNodeChildString(paging, 'after')
-: undefined
+nextPageCursor: paging ? getBinaryNodeChildString(paging, 'after') : undefined
 }
 }
 
 export const parseCollectionsNode = (node: BinaryNode) => {
 const collectionsNode = getBinaryNodeChild(node, 'collections')
-const collections = getBinaryNodeChildren(collectionsNode, 'collection').map<CatalogCollection>(
-collectionNode => {
-const id = getBinaryNodeChildString(collectionNode, 'id')!
-const name = getBinaryNodeChildString(collectionNode, 'name')!
+const collections = getBinaryNodeChildren(collectionsNode, 'collection').map<CatalogCollection>(collectionNode => {
+const id = getBinaryNodeChildString(collectionNode, 'id')!
+const name = getBinaryNodeChildString(collectionNode, 'name')!
 
-const products = getBinaryNodeChildren(collectionNode, 'product').map(parseProductNode)
-return {
-id,
-name,
-products,
-status: parseStatusInfo(collectionNode)
-}
-}
-)
+const products = getBinaryNodeChildren(collectionNode, 'product').map(parseProductNode)
+return {
+id,
+name,
+products,
+status: parseStatusInfo(collectionNode)
+}
+})
 
 return {
 collections
@@ -41,26 +47,24 @@ export const parseCollectionsNode = (node: BinaryNode) => {
 
 export const parseOrderDetailsNode = (node: BinaryNode) => {
 const orderNode = getBinaryNodeChild(node, 'order')
-const products = getBinaryNodeChildren(orderNode, 'product').map<OrderProduct>(
-productNode => {
-const imageNode = getBinaryNodeChild(productNode, 'image')!
-return {
-id: getBinaryNodeChildString(productNode, 'id')!,
-name: getBinaryNodeChildString(productNode, 'name')!,
-imageUrl: getBinaryNodeChildString(imageNode, 'url')!,
-price: +getBinaryNodeChildString(productNode, 'price')!,
-currency: getBinaryNodeChildString(productNode, 'currency')!,
-quantity: +getBinaryNodeChildString(productNode, 'quantity')!
-}
-}
-)
+const products = getBinaryNodeChildren(orderNode, 'product').map<OrderProduct>(productNode => {
+const imageNode = getBinaryNodeChild(productNode, 'image')!
+return {
+id: getBinaryNodeChildString(productNode, 'id')!,
+name: getBinaryNodeChildString(productNode, 'name')!,
+imageUrl: getBinaryNodeChildString(imageNode, 'url')!,
+price: +getBinaryNodeChildString(productNode, 'price')!,
+currency: getBinaryNodeChildString(productNode, 'currency')!,
+quantity: +getBinaryNodeChildString(productNode, 'quantity')!
+}
+})
 
 const priceNode = getBinaryNodeChild(orderNode, 'price')
 
 const orderDetails: OrderDetails = {
 price: {
 total: +getBinaryNodeChildString(priceNode, 'total')!,
-currency: getBinaryNodeChildString(priceNode, 'currency')!,
+currency: getBinaryNodeChildString(priceNode, 'currency')!
 },
 products
 }
@@ -69,94 +73,92 @@ export const parseOrderDetailsNode = (node: BinaryNode) => {
 }
 
 export const toProductNode = (productId: string | undefined, product: ProductCreate | ProductUpdate) => {
-const attrs: BinaryNode['attrs'] = { }
-const content: BinaryNode[] = [ ]
+const attrs: BinaryNode['attrs'] = {}
+const content: BinaryNode[] = []
 
-if(typeof productId !== 'undefined') {
+if (typeof productId !== 'undefined') {
 content.push({
 tag: 'id',
-attrs: { },
+attrs: {},
 content: Buffer.from(productId)
 })
 }
 
-if(typeof product.name !== 'undefined') {
+if (typeof product.name !== 'undefined') {
 content.push({
 tag: 'name',
-attrs: { },
+attrs: {},
 content: Buffer.from(product.name)
 })
 }
 
-if(typeof product.description !== 'undefined') {
+if (typeof product.description !== 'undefined') {
 content.push({
 tag: 'description',
-attrs: { },
+attrs: {},
 content: Buffer.from(product.description)
 })
 }
 
-if(typeof product.retailerId !== 'undefined') {
+if (typeof product.retailerId !== 'undefined') {
 content.push({
 tag: 'retailer_id',
-attrs: { },
+attrs: {},
 content: Buffer.from(product.retailerId)
 })
 }
 
-if(product.images.length) {
+if (product.images.length) {
 content.push({
 tag: 'media',
-attrs: { },
-content: product.images.map(
-img => {
-if(!('url' in img)) {
-throw new Boom('Expected img for product to already be uploaded', { statusCode: 400 })
-}
-
-return {
-tag: 'image',
-attrs: { },
-content: [
-{
-tag: 'url',
-attrs: { },
-content: Buffer.from(img.url.toString())
-}
-]
-}
-}
-)
+attrs: {},
+content: product.images.map(img => {
+if (!('url' in img)) {
+throw new Boom('Expected img for product to already be uploaded', { statusCode: 400 })
+}
+
+return {
+tag: 'image',
+attrs: {},
+content: [
+{
+tag: 'url',
+attrs: {},
+content: Buffer.from(img.url.toString())
+}
+]
+}
+})
 })
 }
 
-if(typeof product.price !== 'undefined') {
+if (typeof product.price !== 'undefined') {
 content.push({
 tag: 'price',
-attrs: { },
+attrs: {},
 content: Buffer.from(product.price.toString())
 })
 }
 
-if(typeof product.currency !== 'undefined') {
+if (typeof product.currency !== 'undefined') {
 content.push({
 tag: 'currency',
-attrs: { },
+attrs: {},
 content: Buffer.from(product.currency)
 })
 }
 
-if('originCountryCode' in product) {
-if(typeof product.originCountryCode === 'undefined') {
+if ('originCountryCode' in product) {
+if (typeof product.originCountryCode === 'undefined') {
 attrs['compliance_category'] = 'COUNTRY_ORIGIN_EXEMPT'
 } else {
 content.push({
 tag: 'compliance_info',
-attrs: { },
+attrs: {},
 content: [
 {
 tag: 'country_code_origin',
-attrs: { },
+attrs: {},
 content: Buffer.from(product.originCountryCode)
 }
 ]
@@ -164,8 +166,7 @@ export const toProductNode = (productId: string | undefined, product: ProductCre
 }
 }
 
-
-if(typeof product.isHidden !== 'undefined') {
+if (typeof product.isHidden !== 'undefined') {
 attrs['is_hidden'] = product.isHidden.toString()
 }
 
@@ -188,16 +189,16 @@ export const parseProductNode = (productNode: BinaryNode) => {
 id,
 imageUrls: parseImageUrls(mediaNode),
 reviewStatus: {
-whatsapp: getBinaryNodeChildString(statusInfoNode, 'status')!,
+whatsapp: getBinaryNodeChildString(statusInfoNode, 'status')!
 },
 availability: 'in stock',
 name: getBinaryNodeChildString(productNode, 'name')!,
 retailerId: getBinaryNodeChildString(productNode, 'retailer_id'),
 url: getBinaryNodeChildString(productNode, 'url'),
 description: getBinaryNodeChildString(productNode, 'description')!,
-price: +getBinaryNodeChildString(productNode, 'price')!,
+price: +getBinaryNodeChildString(productNode, 'price')!,
 currency: getBinaryNodeChildString(productNode, 'currency')!,
-isHidden,
+isHidden
 }
 
 return product
@@ -206,10 +207,16 @@ export const parseProductNode = (productNode: BinaryNode) => {
 /**
 * Uploads images not already uploaded to WA's servers
 */
-export async function uploadingNecessaryImagesOfProduct<T extends ProductUpdate | ProductCreate>(product: T, waUploadToServer: WAMediaUploadFunction, timeoutMs = 30_000) {
+export async function uploadingNecessaryImagesOfProduct<T extends ProductUpdate | ProductCreate>(
+product: T,
+waUploadToServer: WAMediaUploadFunction,
+timeoutMs = 30_000
+) {
 product = {
 ...product,
-images: product.images ? await uploadingNecessaryImages(product.images, waUploadToServer, timeoutMs) : product.images
+images: product.images
+? await uploadingNecessaryImages(product.images, waUploadToServer, timeoutMs)
+: product.images
 }
 return product
 }
@@ -217,43 +224,37 @@ export async function uploadingNecessaryImagesOfProduct<T extends ProductUpdate
 /**
 * Uploads images not already uploaded to WA's servers
 */
-export const uploadingNecessaryImages = async(
+export const uploadingNecessaryImages = async (
 images: WAMediaUpload[],
 waUploadToServer: WAMediaUploadFunction,
 timeoutMs = 30_000
 ) => {
 const results = await Promise.all(
-images.map<Promise<{ url: string }>>(
-async img => {
-
-if('url' in img) {
-const url = img.url.toString()
-if(url.includes('.whatsapp.net')) {
-return { url }
-}
-}
-
-const { stream } = await getStream(img)
-const hasher = createHash('sha256')
-const contentBlocks: Buffer[] = []
-for await (const block of stream) {
-hasher.update(block)
-contentBlocks.push(block)
-}
-
-const sha = hasher.digest('base64')
-
-const { directPath } = await waUploadToServer(
-toReadable(Buffer.concat(contentBlocks)),
-{
-mediaType: 'product-catalog-image',
-fileEncSha256B64: sha,
-timeoutMs
-}
-)
-return { url: getUrlFromDirectPath(directPath) }
-}
-)
+images.map<Promise<{ url: string }>>(async img => {
+if ('url' in img) {
+const url = img.url.toString()
+if (url.includes('.whatsapp.net')) {
+return { url }
+}
+}
+
+const { stream } = await getStream(img)
+const hasher = createHash('sha256')
+const contentBlocks: Buffer[] = []
+for await (const block of stream) {
+hasher.update(block)
+contentBlocks.push(block)
+}
+
+const sha = hasher.digest('base64')
+
+const { directPath } = await waUploadToServer(toReadable(Buffer.concat(contentBlocks)), {
+mediaType: 'product-catalog-image',
+fileEncSha256B64: sha,
+timeoutMs
+})
+return { url: getUrlFromDirectPath(directPath) }
+})
 )
 return results
 }
@@ -270,6 +271,6 @@ const parseStatusInfo = (mediaNode: BinaryNode): CatalogStatus => {
 const node = getBinaryNodeChild(mediaNode, 'status_info')
 return {
 status: getBinaryNodeChildString(node, 'status')!,
-canAppeal: getBinaryNodeChildString(node, 'can_appeal') === 'true',
+canAppeal: getBinaryNodeChildString(node, 'can_appeal') === 'true'
 }
 }

@@ -1,20 +1,32 @@
 import { Boom } from '@hapi/boom'
 import { AxiosRequestConfig } from 'axios'
 import { proto } from '../../WAProto'
-import { BaileysEventEmitter, Chat, ChatModification, ChatMutation, ChatUpdate, Contact, InitialAppStateSyncOptions, LastMessageList, LTHashState, WAPatchCreate, WAPatchName } from '../Types'
+import {
+BaileysEventEmitter,
+Chat,
+ChatModification,
+ChatMutation,
+ChatUpdate,
+Contact,
+InitialAppStateSyncOptions,
+LastMessageList,
+LTHashState,
+WAPatchCreate,
+WAPatchName
+} from '../Types'
 import { ChatLabelAssociation, LabelAssociationType, MessageLabelAssociation } from '../Types/LabelAssociation'
 import { BinaryNode, getBinaryNodeChild, getBinaryNodeChildren, isJidGroup, jidNormalizedUser } from '../WABinary'
 import { aesDecrypt, aesEncrypt, hkdf, hmacSign } from './crypto'
 import { toNumber } from './generics'
 import { ILogger } from './logger'
 import { LT_HASH_ANTI_TAMPERING } from './lt-hash'
-import { downloadContentFromMessage, } from './messages-media'
+import { downloadContentFromMessage } from './messages-media'
 
 type FetchAppStateSyncKey = (keyId: string) => Promise<proto.Message.IAppStateSyncKeyData | null | undefined>
 
 export type ChatMutationMap = { [index: string]: ChatMutation }
 
-const mutationKeys = async(keydata: Uint8Array) => {
+const mutationKeys = async (keydata: Uint8Array) => {
 const expanded = await hkdf(keydata, 160, { info: 'WhatsApp Mutation Keys' })
 return {
 indexKey: expanded.slice(0, 32),
@@ -25,16 +37,21 @@ const mutationKeys = async(keydata: Uint8Array) => {
 }
 }
 
-const generateMac = (operation: proto.SyncdMutation.SyncdOperation, data: Buffer, keyId: Uint8Array | string, key: Buffer) => {
+const generateMac = (
+operation: proto.SyncdMutation.SyncdOperation,
+data: Buffer,
+keyId: Uint8Array | string,
+key: Buffer
+) => {
 const getKeyData = () => {
 let r: number
 switch (operation) {
-case proto.SyncdMutation.SyncdOperation.SET:
-r = 0x01
-break
-case proto.SyncdMutation.SyncdOperation.REMOVE:
-r = 0x02
-break
+case proto.SyncdMutation.SyncdOperation.SET:
+r = 0x01
+break
+case proto.SyncdMutation.SyncdOperation.REMOVE:
+r = 0x02
+break
 }
 
 const buff = Buffer.from([r])
@@ -58,7 +75,7 @@ const to64BitNetworkOrder = (e: number) => {
 return buff
 }
 
-type Mac = { indexMac: Uint8Array, valueMac: Uint8Array, operation: proto.SyncdMutation.SyncdOperation }
+type Mac = { indexMac: Uint8Array; valueMac: Uint8Array; operation: proto.SyncdMutation.SyncdOperation }
 
 const makeLtHashGenerator = ({ indexValueMap, hash }: Pick<LTHashState, 'hash' | 'indexValueMap'>) => {
 indexValueMap = { ...indexValueMap }
@@ -69,8 +86,8 @@ const makeLtHashGenerator = ({ indexValueMap, hash }: Pick<LTHashState, 'hash' |
 mix: ({ indexMac, valueMac, operation }: Mac) => {
 const indexMacBase64 = Buffer.from(indexMac).toString('base64')
 const prevOp = indexValueMap[indexMacBase64]
-if(operation === proto.SyncdMutation.SyncdOperation.REMOVE) {
-if(!prevOp) {
+if (operation === proto.SyncdMutation.SyncdOperation.REMOVE) {
+if (!prevOp) {
 throw new Boom('tried remove, but no previous op', { data: { indexMac, valueMac } })
 }
 
@@ -82,11 +99,11 @@ const makeLtHashGenerator = ({ indexValueMap, hash }: Pick<LTHashState, 'hash' |
 indexValueMap[indexMacBase64] = { valueMac }
 }
 
-if(prevOp) {
+if (prevOp) {
 subBuffs.push(new Uint8Array(prevOp.valueMac).buffer)
 }
 },
-finish: async() => {
+finish: async () => {
 const hashArrayBuffer = new Uint8Array(hash).buffer
 const result = await LT_HASH_ANTI_TAMPERING.subtractThenAdd(hashArrayBuffer, addBuffs, subBuffs)
 const buffer = Buffer.from(result)
@@ -100,34 +117,31 @@ const makeLtHashGenerator = ({ indexValueMap, hash }: Pick<LTHashState, 'hash' |
 }
 
 const generateSnapshotMac = (lthash: Uint8Array, version: number, name: WAPatchName, key: Buffer) => {
-const total = Buffer.concat([
-lthash,
-to64BitNetworkOrder(version),
-Buffer.from(name, 'utf-8')
-])
+const total = Buffer.concat([lthash, to64BitNetworkOrder(version), Buffer.from(name, 'utf-8')])
 return hmacSign(total, key, 'sha256')
 }
 
-const generatePatchMac = (snapshotMac: Uint8Array, valueMacs: Uint8Array[], version: number, type: WAPatchName, key: Buffer) => {
-const total = Buffer.concat([
-snapshotMac,
-...valueMacs,
-to64BitNetworkOrder(version),
-Buffer.from(type, 'utf-8')
-])
+const generatePatchMac = (
+snapshotMac: Uint8Array,
+valueMacs: Uint8Array[],
+version: number,
+type: WAPatchName,
+key: Buffer
+) => {
+const total = Buffer.concat([snapshotMac, ...valueMacs, to64BitNetworkOrder(version), Buffer.from(type, 'utf-8')])
 return hmacSign(total, key)
 }
 
 export const newLTHashState = (): LTHashState => ({ version: 0, hash: Buffer.alloc(128), indexValueMap: {} })
 
-export const encodeSyncdPatch = async(
+export const encodeSyncdPatch = async (
 { type, index, syncAction, apiVersion, operation }: WAPatchCreate,
 myAppStateKeyId: string,
 state: LTHashState,
 getAppStateSyncKey: FetchAppStateSyncKey
 ) => {
 const key = !!myAppStateKeyId ? await getAppStateSyncKey(myAppStateKeyId) : undefined
-if(!key) {
+if (!key) {
 throw new Boom(`myAppStateKey ("${myAppStateKeyId}") not present`, { statusCode: 404 })
 }
 
@@ -185,7 +199,7 @@ export const encodeSyncdPatch = async(
 return { patch, state }
 }
 
-export const decodeSyncdMutations = async(
+export const decodeSyncdMutations = async (
 msgMutations: (proto.ISyncdMutation | proto.ISyncdRecord)[],
 initialState: LTHashState,
 getAppStateSyncKey: FetchAppStateSyncKey,
@@ -196,19 +210,20 @@ export const decodeSyncdMutations = async(
 // indexKey used to HMAC sign record.index.blob
 // valueEncryptionKey used to AES-256-CBC encrypt record.value.blob[0:-32]
 // the remaining record.value.blob[0:-32] is the mac, it the HMAC sign of key.keyId + decoded proto data + length of bytes in keyId
-for(const msgMutation of msgMutations) {
+for (const msgMutation of msgMutations) {
 // if it's a syncdmutation, get the operation property
 // otherwise, if it's only a record -- it'll be a SET mutation
 const operation = 'operation' in msgMutation ? msgMutation.operation : proto.SyncdMutation.SyncdOperation.SET
-const record = ('record' in msgMutation && !!msgMutation.record) ? msgMutation.record : msgMutation as proto.ISyncdRecord
+const record =
+'record' in msgMutation && !!msgMutation.record ? msgMutation.record : (msgMutation as proto.ISyncdRecord)
 
 const key = await getKey(record.keyId!.id!)
 const content = Buffer.from(record.value!.blob!)
 const encContent = content.slice(0, -32)
 const ogValueMac = content.slice(-32)
-if(validateMacs) {
+if (validateMacs) {
 const contentHmac = generateMac(operation!, encContent, record.keyId!.id!, key.valueMacKey)
-if(Buffer.compare(contentHmac, ogValueMac) !== 0) {
+if (Buffer.compare(contentHmac, ogValueMac) !== 0) {
 throw new Boom('HMAC content verification failed')
 }
 }
@@ -216,9 +231,9 @@ export const decodeSyncdMutations = async(
 const result = aesDecrypt(encContent, key.valueEncryptionKey)
 const syncAction = proto.SyncActionData.decode(result)
 
-if(validateMacs) {
+if (validateMacs) {
 const hmac = hmacSign(syncAction.index!, key.indexKey)
-if(Buffer.compare(hmac, record.index!.blob!) !== 0) {
+if (Buffer.compare(hmac, record.index!.blob!) !== 0) {
 throw new Boom('HMAC index verification failed')
 }
 }
@@ -238,15 +253,18 @@ export const decodeSyncdMutations = async(
 async function getKey(keyId: Uint8Array) {
 const base64Key = Buffer.from(keyId).toString('base64')
 const keyEnc = await getAppStateSyncKey(base64Key)
-if(!keyEnc) {
-throw new Boom(`failed to find key "${base64Key}" to decode mutation`, { statusCode: 404, data: { msgMutations } })
+if (!keyEnc) {
+throw new Boom(`failed to find key "${base64Key}" to decode mutation`, {
+statusCode: 404,
+data: { msgMutations }
+})
 }
 
 return mutationKeys(keyEnc.keyData!)
 }
 }
 
-export const decodeSyncdPatch = async(
+export const decodeSyncdPatch = async (
 msg: proto.ISyncdPatch,
 name: WAPatchName,
 initialState: LTHashState,
@@ -254,18 +272,24 @@ export const decodeSyncdPatch = async(
 onMutation: (mutation: ChatMutation) => void,
 validateMacs: boolean
 ) => {
-if(validateMacs) {
+if (validateMacs) {
 const base64Key = Buffer.from(msg.keyId!.id!).toString('base64')
 const mainKeyObj = await getAppStateSyncKey(base64Key)
-if(!mainKeyObj) {
+if (!mainKeyObj) {
 throw new Boom(`failed to find key "${base64Key}" to decode patch`, { statusCode: 404, data: { msg } })
 }
 
 const mainKey = await mutationKeys(mainKeyObj.keyData!)
 const mutationmacs = msg.mutations!.map(mutation => mutation.record!.value!.blob!.slice(-32))
 
-const patchMac = generatePatchMac(msg.snapshotMac!, mutationmacs, toNumber(msg.version!.version), name, mainKey.patchMacKey)
-if(Buffer.compare(patchMac, msg.patchMac!) !== 0) {
+const patchMac = generatePatchMac(
+msg.snapshotMac!,
+mutationmacs,
+toNumber(msg.version!.version),
+name,
+mainKey.patchMacKey
+)
+if (Buffer.compare(patchMac, msg.patchMac!) !== 0) {
 throw new Boom('Invalid patch mac')
 }
 }
@@ -274,68 +298,59 @@ export const decodeSyncdPatch = async(
 return result
 }
 
-export const extractSyncdPatches = async(
-result: BinaryNode,
-options: AxiosRequestConfig<{}>
-) => {
+export const extractSyncdPatches = async (result: BinaryNode, options: AxiosRequestConfig<{}>) => {
 const syncNode = getBinaryNodeChild(result, 'sync')
 const collectionNodes = getBinaryNodeChildren(syncNode, 'collection')
 
-const final = {} as { [T in WAPatchName]: { patches: proto.ISyncdPatch[], hasMorePatches: boolean, snapshot?: proto.ISyncdSnapshot } }
+const final = {} as {
+[T in WAPatchName]: { patches: proto.ISyncdPatch[]; hasMorePatches: boolean; snapshot?: proto.ISyncdSnapshot }
+}
 await Promise.all(
-collectionNodes.map(
-async collectionNode => {
-const patchesNode = getBinaryNodeChild(collectionNode, 'patches')
+collectionNodes.map(async collectionNode => {
+const patchesNode = getBinaryNodeChild(collectionNode, 'patches')
 
-const patches = getBinaryNodeChildren(patchesNode || collectionNode, 'patch')
-const snapshotNode = getBinaryNodeChild(collectionNode, 'snapshot')
+const patches = getBinaryNodeChildren(patchesNode || collectionNode, 'patch')
+const snapshotNode = getBinaryNodeChild(collectionNode, 'snapshot')
 
-const syncds: proto.ISyncdPatch[] = []
-const name = collectionNode.attrs.name as WAPatchName
+const syncds: proto.ISyncdPatch[] = []
+const name = collectionNode.attrs.name as WAPatchName
 
-const hasMorePatches = collectionNode.attrs.has_more_patches === 'true'
+const hasMorePatches = collectionNode.attrs.has_more_patches === 'true'
 
-let snapshot: proto.ISyncdSnapshot | undefined = undefined
-if(snapshotNode && !!snapshotNode.content) {
-if(!Buffer.isBuffer(snapshotNode)) {
-snapshotNode.content = Buffer.from(Object.values(snapshotNode.content))
-}
+let snapshot: proto.ISyncdSnapshot | undefined = undefined
+if (snapshotNode && !!snapshotNode.content) {
+if (!Buffer.isBuffer(snapshotNode)) {
+snapshotNode.content = Buffer.from(Object.values(snapshotNode.content))
+}
 
-const blobRef = proto.ExternalBlobReference.decode(
-snapshotNode.content as Buffer
-)
-const data = await downloadExternalBlob(blobRef, options)
-snapshot = proto.SyncdSnapshot.decode(data)
-}
+const blobRef = proto.ExternalBlobReference.decode(snapshotNode.content as Buffer)
+const data = await downloadExternalBlob(blobRef, options)
+snapshot = proto.SyncdSnapshot.decode(data)
+}
 
-for(let { content } of patches) {
-if(content) {
-if(!Buffer.isBuffer(content)) {
-content = Buffer.from(Object.values(content))
-}
+for (let { content } of patches) {
+if (content) {
+if (!Buffer.isBuffer(content)) {
+content = Buffer.from(Object.values(content))
+}
 
-const syncd = proto.SyncdPatch.decode(content as Uint8Array)
-if(!syncd.version) {
-syncd.version = { version: +collectionNode.attrs.version + 1 }
-}
+const syncd = proto.SyncdPatch.decode(content as Uint8Array)
+if (!syncd.version) {
+syncd.version = { version: +collectionNode.attrs.version + 1 }
+}
 
-syncds.push(syncd)
-}
-}
+syncds.push(syncd)
+}
+}
 
-final[name] = { patches: syncds, hasMorePatches, snapshot }
-}
-)
+final[name] = { patches: syncds, hasMorePatches, snapshot }
+})
 )
 
 return final
 }
 
-export const downloadExternalBlob = async(
-blob: proto.IExternalBlobReference,
-options: AxiosRequestConfig<{}>
-) => {
+export const downloadExternalBlob = async (blob: proto.IExternalBlobReference, options: AxiosRequestConfig<{}>) => {
 const stream = await downloadContentFromMessage(blob, 'md-app-state', { options })
 const bufferArray: Buffer[] = []
 for await (const chunk of stream) {
@@ -345,16 +360,13 @@ export const downloadExternalBlob = async(
 return Buffer.concat(bufferArray)
 }
 
-export const downloadExternalPatch = async(
-blob: proto.IExternalBlobReference,
-options: AxiosRequestConfig<{}>
-) => {
+export const downloadExternalPatch = async (blob: proto.IExternalBlobReference, options: AxiosRequestConfig<{}>) => {
 const buffer = await downloadExternalBlob(blob, options)
 const syncData = proto.SyncdMutations.decode(buffer)
 return syncData
 }
 
-export const decodeSyncdSnapshot = async(
+export const decodeSyncdSnapshot = async (
 name: WAPatchName,
 snapshot: proto.ISyncdSnapshot,
 getAppStateSyncKey: FetchAppStateSyncKey,
@@ -365,34 +377,33 @@ export const decodeSyncdSnapshot = async(
 newState.version = toNumber(snapshot.version!.version)
 
 const mutationMap: ChatMutationMap = {}
-const areMutationsRequired = typeof minimumVersionNumber === 'undefined'
-|| newState.version > minimumVersionNumber
+const areMutationsRequired = typeof minimumVersionNumber === 'undefined' || newState.version > minimumVersionNumber
 
 const { hash, indexValueMap } = await decodeSyncdMutations(
 snapshot.records!,
 newState,
 getAppStateSyncKey,
 areMutationsRequired
-? (mutation) => {
-const index = mutation.syncAction.index?.toString()
-mutationMap[index!] = mutation
-}
-: () => { },
+? mutation => {
+const index = mutation.syncAction.index?.toString()
+mutationMap[index!] = mutation
+}
+: () => {},
 validateMacs
 )
 newState.hash = hash
 newState.indexValueMap = indexValueMap
 
-if(validateMacs) {
+if (validateMacs) {
 const base64Key = Buffer.from(snapshot.keyId!.id!).toString('base64')
 const keyEnc = await getAppStateSyncKey(base64Key)
-if(!keyEnc) {
+if (!keyEnc) {
 throw new Boom(`failed to find key "${base64Key}" to decode mutation`)
 }
 
 const result = await mutationKeys(keyEnc.keyData!)
 const computedSnapshotMac = generateSnapshotMac(newState.hash, newState.version, name, result.snapshotMacKey)
-if(Buffer.compare(snapshot.mac!, computedSnapshotMac) !== 0) {
+if (Buffer.compare(snapshot.mac!, computedSnapshotMac) !== 0) {
 throw new Boom(`failed to verify LTHash at ${newState.version} of ${name} from snapshot`)
 }
 }
@@ -403,7 +414,7 @@ export const decodeSyncdSnapshot = async(
 }
 }
 
-export const decodePatches = async(
+export const decodePatches = async (
 name: WAPatchName,
 syncds: proto.ISyncdPatch[],
 initial: LTHashState,
@@ -420,9 +431,9 @@ export const decodePatches = async(
 
 const mutationMap: ChatMutationMap = {}
 
-for(const syncd of syncds) {
+for (const syncd of syncds) {
 const { version, keyId, snapshotMac } = syncd
-if(syncd.externalMutations) {
+if (syncd.externalMutations) {
 logger?.trace({ name, version }, 'downloading external patch')
 const ref = await downloadExternalPatch(syncd.externalMutations, options)
 logger?.debug({ name, version, mutations: ref.mutations.length }, 'downloaded external patch')
@@ -441,26 +452,26 @@ export const decodePatches = async(
 getAppStateSyncKey,
 shouldMutate
 ? mutation => {
-const index = mutation.syncAction.index?.toString()
-mutationMap[index!] = mutation
-}
-: (() => { }),
+const index = mutation.syncAction.index?.toString()
+mutationMap[index!] = mutation
+}
+: () => {},
 true
 )
 
 newState.hash = decodeResult.hash
 newState.indexValueMap = decodeResult.indexValueMap
 
-if(validateMacs) {
+if (validateMacs) {
 const base64Key = Buffer.from(keyId!.id!).toString('base64')
 const keyEnc = await getAppStateSyncKey(base64Key)
-if(!keyEnc) {
+if (!keyEnc) {
 throw new Boom(`failed to find key "${base64Key}" to decode mutation`)
 }
 
 const result = await mutationKeys(keyEnc.keyData!)
 const computedSnapshotMac = generateSnapshotMac(newState.hash, newState.version, name, result.snapshotMacKey)
-if(Buffer.compare(snapshotMac!, computedSnapshotMac) !== 0) {
+if (Buffer.compare(snapshotMac!, computedSnapshotMac) !== 0) {
 throw new Boom(`failed to verify LTHash at ${newState.version} of ${name}`)
 }
 }
@@ -472,38 +483,35 @@ export const decodePatches = async(
 return { state: newState, mutationMap }
 }
 
-export const chatModificationToAppPatch = (
-mod: ChatModification,
-jid: string
-) => {
+export const chatModificationToAppPatch = (mod: ChatModification, jid: string) => {
 const OP = proto.SyncdMutation.SyncdOperation
 const getMessageRange = (lastMessages: LastMessageList) => {
 let messageRange: proto.SyncActionValue.ISyncActionMessageRange
-if(Array.isArray(lastMessages)) {
+if (Array.isArray(lastMessages)) {
 const lastMsg = lastMessages[lastMessages.length - 1]
 messageRange = {
 lastMessageTimestamp: lastMsg?.messageTimestamp,
-messages: lastMessages?.length ? lastMessages.map(
-m => {
-if(!m.key?.id || !m.key?.remoteJid) {
-throw new Boom('Incomplete key', { statusCode: 400, data: m })
-}
+messages: lastMessages?.length
+? lastMessages.map(m => {
+if (!m.key?.id || !m.key?.remoteJid) {
+throw new Boom('Incomplete key', { statusCode: 400, data: m })
+}
 
-if(isJidGroup(m.key.remoteJid) && !m.key.fromMe && !m.key.participant) {
-throw new Boom('Expected not from me message to have participant', { statusCode: 400, data: m })
-}
+if (isJidGroup(m.key.remoteJid) && !m.key.fromMe && !m.key.participant) {
+throw new Boom('Expected not from me message to have participant', { statusCode: 400, data: m })
+}
 
-if(!m.messageTimestamp || !toNumber(m.messageTimestamp)) {
-throw new Boom('Missing timestamp in last message list', { statusCode: 400, data: m })
-}
+if (!m.messageTimestamp || !toNumber(m.messageTimestamp)) {
+throw new Boom('Missing timestamp in last message list', { statusCode: 400, data: m })
+}
 
-if(m.key.participant) {
-m.key.participant = jidNormalizedUser(m.key.participant)
-}
+if (m.key.participant) {
+m.key.participant = jidNormalizedUser(m.key.participant)
+}
 
-return m
-}
-) : undefined
+return m
+})
+: undefined
 }
 } else {
 messageRange = lastMessages
@@ -513,7 +521,7 @@ export const chatModificationToAppPatch = (
 }
 
 let patch: WAPatchCreate
-if('mute' in mod) {
+if ('mute' in mod) {
 patch = {
 syncAction: {
 muteAction: {
@@ -526,7 +534,7 @@ export const chatModificationToAppPatch = (
 apiVersion: 2,
 operation: OP.SET
 }
-} else if('archive' in mod) {
+} else if ('archive' in mod) {
 patch = {
 syncAction: {
 archiveChatAction: {
@@ -539,7 +547,7 @@ export const chatModificationToAppPatch = (
 apiVersion: 3,
 operation: OP.SET
 }
-} else if('markRead' in mod) {
+} else if ('markRead' in mod) {
 patch = {
 syncAction: {
 markChatAsReadAction: {
@@ -552,7 +560,7 @@ export const chatModificationToAppPatch = (
 apiVersion: 3,
 operation: OP.SET
 }
-} else if('deleteForMe' in mod) {
+} else if ('deleteForMe' in mod) {
 const { timestamp, key, deleteMedia } = mod.deleteForMe
 patch = {
 syncAction: {
@@ -566,7 +574,7 @@ export const chatModificationToAppPatch = (
 apiVersion: 3,
 operation: OP.SET
 }
-} else if('clear' in mod) {
+} else if ('clear' in mod) {
 patch = {
 syncAction: {
 clearChatAction: {} // add message range later
@@ -576,7 +584,7 @@ export const chatModificationToAppPatch = (
 apiVersion: 6,
 operation: OP.SET
 }
-} else if('pin' in mod) {
+} else if ('pin' in mod) {
 patch = {
 syncAction: {
 pinAction: {
@@ -588,7 +596,7 @@ export const chatModificationToAppPatch = (
 apiVersion: 5,
 operation: OP.SET
 }
-} else if('star' in mod) {
+} else if ('star' in mod) {
 const key = mod.star.messages[0]
 patch = {
 syncAction: {
@@ -601,11 +609,11 @@ export const chatModificationToAppPatch = (
 apiVersion: 2,
 operation: OP.SET
 }
-} else if('delete' in mod) {
+} else if ('delete' in mod) {
 patch = {
 syncAction: {
 deleteChatAction: {
-messageRange: getMessageRange(mod.lastMessages),
+messageRange: getMessageRange(mod.lastMessages)
 }
 },
 index: ['deleteChat', jid, '1'],
@@ -613,7 +621,7 @@ export const chatModificationToAppPatch = (
 apiVersion: 6,
 operation: OP.SET
 }
-} else if('pushNameSetting' in mod) {
+} else if ('pushNameSetting' in mod) {
 patch = {
 syncAction: {
 pushNameSetting: {
@@ -623,71 +631,64 @@ export const chatModificationToAppPatch = (
 index: ['setting_pushName'],
 type: 'critical_block',
 apiVersion: 1,
-operation: OP.SET,
+operation: OP.SET
 }
-} else if('addLabel' in mod) {
+} else if ('addLabel' in mod) {
 patch = {
 syncAction: {
 labelEditAction: {
 name: mod.addLabel.name,
 color: mod.addLabel.color,
-predefinedId : mod.addLabel.predefinedId,
+predefinedId: mod.addLabel.predefinedId,
 deleted: mod.addLabel.deleted
 }
 },
 index: ['label_edit', mod.addLabel.id],
 type: 'regular',
 apiVersion: 3,
-operation: OP.SET,
+operation: OP.SET
 }
-} else if('addChatLabel' in mod) {
+} else if ('addChatLabel' in mod) {
 patch = {
 syncAction: {
 labelAssociationAction: {
-labeled: true,
+labeled: true
 }
 },
 index: [LabelAssociationType.Chat, mod.addChatLabel.labelId, jid],
 type: 'regular',
 apiVersion: 3,
-operation: OP.SET,
+operation: OP.SET
 }
-} else if('removeChatLabel' in mod) {
+} else if ('removeChatLabel' in mod) {
 patch = {
 syncAction: {
 labelAssociationAction: {
-labeled: false,
+labeled: false
 }
 },
 index: [LabelAssociationType.Chat, mod.removeChatLabel.labelId, jid],
 type: 'regular',
 apiVersion: 3,
-operation: OP.SET,
+operation: OP.SET
 }
-} else if('addMessageLabel' in mod) {
+} else if ('addMessageLabel' in mod) {
 patch = {
 syncAction: {
 labelAssociationAction: {
-labeled: true,
+labeled: true
 }
 },
-index: [
-LabelAssociationType.Message,
-mod.addMessageLabel.labelId,
-jid,
-mod.addMessageLabel.messageId,
-'0',
-'0'
-],
+index: [LabelAssociationType.Message, mod.addMessageLabel.labelId, jid, mod.addMessageLabel.messageId, '0', '0'],
 type: 'regular',
 apiVersion: 3,
-operation: OP.SET,
+operation: OP.SET
 }
-} else if('removeMessageLabel' in mod) {
+} else if ('removeMessageLabel' in mod) {
 patch = {
 syncAction: {
 labelAssociationAction: {
-labeled: false,
+labeled: false
 }
 },
 index: [
@@ -700,7 +701,7 @@ export const chatModificationToAppPatch = (
 ],
 type: 'regular',
 apiVersion: 3,
-operation: OP.SET,
+operation: OP.SET
 }
 } else {
 throw new Boom('not supported')
@@ -716,7 +717,7 @@ export const processSyncAction = (
 ev: BaileysEventEmitter,
 me: Contact,
 initialSyncOpts?: InitialAppStateSyncOptions,
-logger?: ILogger,
+logger?: ILogger
 ) => {
 const isInitialSync = !!initialSyncOpts
 const accountSettings = initialSyncOpts?.accountSettings
@@ -728,20 +729,15 @@ export const processSyncAction = (
 index: [type, id, msgId, fromMe]
 } = syncAction
 
-if(action?.muteAction) {
-ev.emit(
-'chats.update',
-[
-{
-id,
-muteEndTime: action.muteAction?.muted
-? toNumber(action.muteAction.muteEndTimestamp)
-: null,
-conditional: getChatUpdateConditional(id, undefined)
-}
-]
-)
-} else if(action?.archiveChatAction || type === 'archive' || type === 'unarchive') {
+if (action?.muteAction) {
+ev.emit('chats.update', [
+{
+id,
+muteEndTime: action.muteAction?.muted ? toNumber(action.muteAction.muteEndTimestamp) : null,
+conditional: getChatUpdateConditional(id, undefined)
+}
+])
+} else if (action?.archiveChatAction || type === 'archive' || type === 'unarchive') {
 // okay so we've to do some annoying computation here
 // when we're initially syncing the app state
 // there are a few cases we need to handle
@@ -753,9 +749,7 @@ export const processSyncAction = (
 // 2. if the account unarchiveChats setting is false -- then it doesn't matter,
 // it'll always take an app state action to mark in unarchived -- which we'll get anyway
 const archiveAction = action?.archiveChatAction
-const isArchived = archiveAction
-? archiveAction.archived
-: type === 'archive'
+const isArchived = archiveAction ? archiveAction.archived : type === 'archive'
 // // basically we don't need to fire an "archive" update if the chat is being marked unarchvied
 // // this only applies for the initial sync
 // if(isInitialSync && !isArchived) {
@@ -765,24 +759,28 @@ export const processSyncAction = (
 const msgRange = !accountSettings?.unarchiveChats ? undefined : archiveAction?.messageRange
 // logger?.debug({ chat: id, syncAction }, 'message range archive')
 
-ev.emit('chats.update', [{
-id,
-archived: isArchived,
-conditional: getChatUpdateConditional(id, msgRange)
-}])
-} else if(action?.markChatAsReadAction) {
+ev.emit('chats.update', [
+{
+id,
+archived: isArchived,
+conditional: getChatUpdateConditional(id, msgRange)
+}
+])
+} else if (action?.markChatAsReadAction) {
 const markReadAction = action.markChatAsReadAction
 // basically we don't need to fire an "read" update if the chat is being marked as read
 // because the chat is read by default
 // this only applies for the initial sync
 const isNullUpdate = isInitialSync && markReadAction.read
 
-ev.emit('chats.update', [{
-id,
-unreadCount: isNullUpdate ? null : !!markReadAction?.read ? 0 : -1,
-conditional: getChatUpdateConditional(id, markReadAction?.messageRange)
-}])
-} else if(action?.deleteMessageForMeAction || type === 'deleteMessageForMe') {
+ev.emit('chats.update', [
+{
+id,
+unreadCount: isNullUpdate ? null : !!markReadAction?.read ? 0 : -1,
+conditional: getChatUpdateConditional(id, markReadAction?.messageRange)
+}
+])
+} else if (action?.deleteMessageForMeAction || type === 'deleteMessageForMe') {
 ev.emit('messages.delete', {
 keys: [
 {
@@ -792,30 +790,32 @@ export const processSyncAction = (
 }
 ]
 })
-} else if(action?.contactAction) {
+} else if (action?.contactAction) {
 ev.emit('contacts.upsert', [{ id, name: action.contactAction.fullName! }])
-} else if(action?.pushNameSetting) {
+} else if (action?.pushNameSetting) {
 const name = action?.pushNameSetting?.name
-if(name && me?.name !== name) {
+if (name && me?.name !== name) {
 ev.emit('creds.update', { me: { ...me, name } })
 }
-} else if(action?.pinAction) {
-ev.emit('chats.update', [{
-id,
-pinned: action.pinAction?.pinned ? toNumber(action.timestamp) : null,
-conditional: getChatUpdateConditional(id, undefined)
-}])
-} else if(action?.unarchiveChatsSetting) {
+} else if (action?.pinAction) {
+ev.emit('chats.update', [
+{
+id,
+pinned: action.pinAction?.pinned ? toNumber(action.timestamp) : null,
+conditional: getChatUpdateConditional(id, undefined)
+}
+])
+} else if (action?.unarchiveChatsSetting) {
 const unarchiveChats = !!action.unarchiveChatsSetting.unarchiveChats
 ev.emit('creds.update', { accountSettings: { unarchiveChats } })
 
 logger?.info(`archive setting updated => '${action.unarchiveChatsSetting.unarchiveChats}'`)
-if(accountSettings) {
+if (accountSettings) {
 accountSettings.unarchiveChats = unarchiveChats
 }
-} else if(action?.starAction || type === 'star') {
+} else if (action?.starAction || type === 'star') {
 let starred = action?.starAction?.starred
-if(typeof starred !== 'boolean') {
+if (typeof starred !== 'boolean') {
 starred = syncAction.index[syncAction.index.length - 1] === '1'
 }
 
@@ -825,11 +825,11 @@ export const processSyncAction = (
 update: { starred }
 }
 ])
-} else if(action?.deleteChatAction || type === 'deleteChat') {
-if(!isInitialSync) {
+} else if (action?.deleteChatAction || type === 'deleteChat') {
+if (!isInitialSync) {
 ev.emit('chats.delete', [id])
 }
-} else if(action?.labelEditAction) {
+} else if (action?.labelEditAction) {
 const { name, color, deleted, predefinedId } = action.labelEditAction
 
 ev.emit('labels.edit', {
@@ -839,42 +839,47 @@ export const processSyncAction = (
|
||||
deleted: deleted!,
|
||||
predefinedId: predefinedId ? String(predefinedId) : undefined
|
||||
})
|
||||
} else if(action?.labelAssociationAction) {
|
||||
} else if (action?.labelAssociationAction) {
|
||||
ev.emit('labels.association', {
|
||||
type: action.labelAssociationAction.labeled
|
||||
? 'add'
|
||||
: 'remove',
|
||||
association: type === LabelAssociationType.Chat
|
||||
? {
|
||||
type: LabelAssociationType.Chat,
|
||||
chatId: syncAction.index[2],
|
||||
labelId: syncAction.index[1]
|
||||
} as ChatLabelAssociation
|
||||
: {
|
||||
type: LabelAssociationType.Message,
|
||||
chatId: syncAction.index[2],
|
||||
messageId: syncAction.index[3],
|
||||
labelId: syncAction.index[1]
|
||||
} as MessageLabelAssociation
|
||||
type: action.labelAssociationAction.labeled ? 'add' : 'remove',
|
||||
association:
|
||||
type === LabelAssociationType.Chat
|
||||
? ({
|
||||
type: LabelAssociationType.Chat,
|
||||
chatId: syncAction.index[2],
|
||||
labelId: syncAction.index[1]
|
||||
} as ChatLabelAssociation)
|
||||
: ({
|
||||
type: LabelAssociationType.Message,
|
||||
chatId: syncAction.index[2],
|
||||
messageId: syncAction.index[3],
|
||||
labelId: syncAction.index[1]
|
||||
} as MessageLabelAssociation)
|
||||
})
|
||||
} else {
|
||||
logger?.debug({ syncAction, id }, 'unprocessable update')
|
||||
}
|
||||
|
||||
function getChatUpdateConditional(id: string, msgRange: proto.SyncActionValue.ISyncActionMessageRange | null | undefined): ChatUpdate['conditional'] {
|
||||
function getChatUpdateConditional(
|
||||
id: string,
|
||||
msgRange: proto.SyncActionValue.ISyncActionMessageRange | null | undefined
|
||||
): ChatUpdate['conditional'] {
|
||||
return isInitialSync
|
||||
? (data) => {
|
||||
const chat = data.historySets.chats[id] || data.chatUpserts[id]
|
||||
if(chat) {
|
||||
return msgRange ? isValidPatchBasedOnMessageRange(chat, msgRange) : true
|
||||
? data => {
|
||||
const chat = data.historySets.chats[id] || data.chatUpserts[id]
|
||||
if (chat) {
|
||||
return msgRange ? isValidPatchBasedOnMessageRange(chat, msgRange) : true
|
||||
}
|
||||
}
|
||||
}
|
||||
: undefined
|
||||
}
|
||||
|
||||
function isValidPatchBasedOnMessageRange(chat: Chat, msgRange: proto.SyncActionValue.ISyncActionMessageRange | null | undefined) {
|
||||
const lastMsgTimestamp = Number(msgRange?.lastMessageTimestamp || msgRange?.lastSystemMessageTimestamp || 0)
|
||||
const chatLastMsgTimestamp = Number(chat?.lastMessageRecvTimestamp || 0)
|
||||
return lastMsgTimestamp >= chatLastMsgTimestamp
|
||||
function isValidPatchBasedOnMessageRange(
|
||||
chat: Chat,
|
||||
msgRange: proto.SyncActionValue.ISyncActionMessageRange | null | undefined
|
||||
) {
|
||||
const lastMsgTimestamp = Number(msgRange?.lastMessageTimestamp || msgRange?.lastSystemMessageTimestamp || 0)
|
||||
const chatLastMsgTimestamp = Number(chat?.lastMessageRecvTimestamp || 0)
|
||||
return lastMsgTimestamp >= chatLastMsgTimestamp
|
||||
}
|
||||
}
|
||||
|
||||
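From the consumer side, the reshaped `labels.association` payload is easiest to see in a listener. A minimal sketch; the package name and the shape of the socket's `ev` emitter are assumed from the wider Baileys API, not from this diff:

```ts
import makeWASocket, { LabelAssociationType } from '@whiskeysockets/baileys'

const sock = makeWASocket({})

// fires for both chat-level and message-level label (un)assignments
sock.ev.on('labels.association', ({ type, association }) => {
	if (association.type === LabelAssociationType.Chat) {
		console.log(`${type} label ${association.labelId} on chat ${association.chatId}`)
	} else {
		console.log(`${type} label ${association.labelId} on message ${association.messageId}`)
	}
})
```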
@@ -7,11 +7,8 @@ import { KeyPair } from '../Types'
const { subtle } = globalThis.crypto

/** prefix version byte to the pub keys, required for some curve crypto functions */
export const generateSignalPubKey = (pubKey: Uint8Array | Buffer) => (
pubKey.length === 33
? pubKey
: Buffer.concat([ KEY_BUNDLE_TYPE, pubKey ])
)
export const generateSignalPubKey = (pubKey: Uint8Array | Buffer) =>
pubKey.length === 33 ? pubKey : Buffer.concat([KEY_BUNDLE_TYPE, pubKey])

export const Curve = {
generateKeyPair: (): KeyPair => {
@@ -26,14 +23,12 @@ export const Curve = {
const shared = libsignal.curve.calculateAgreement(generateSignalPubKey(publicKey), privateKey)
return Buffer.from(shared)
},
sign: (privateKey: Uint8Array, buf: Uint8Array) => (
libsignal.curve.calculateSignature(privateKey, buf)
),
sign: (privateKey: Uint8Array, buf: Uint8Array) => libsignal.curve.calculateSignature(privateKey, buf),
verify: (pubKey: Uint8Array, message: Uint8Array, signature: Uint8Array) => {
try {
libsignal.curve.verifySignature(generateSignalPubKey(pubKey), message, signature)
return true
} catch(error) {
} catch (error) {
return false
}
}
@@ -73,7 +68,7 @@ export function aesDecryptGCM(ciphertext: Uint8Array, key: Uint8Array, iv: Uint8
decipher.setAAD(additionalData)
decipher.setAuthTag(tag)

return Buffer.concat([ decipher.update(enc), decipher.final() ])
return Buffer.concat([decipher.update(enc), decipher.final()])
}

export function aesEncryptCTR(plaintext: Uint8Array, key: Uint8Array, iv: Uint8Array) {
@@ -111,7 +106,11 @@ export function aesEncrypWithIV(buffer: Buffer, key: Buffer, IV: Buffer) {
}

// sign HMAC using SHA 256
export function hmacSign(buffer: Buffer | Uint8Array, key: Buffer | Uint8Array, variant: 'sha256' | 'sha512' = 'sha256') {
export function hmacSign(
buffer: Buffer | Uint8Array,
key: Buffer | Uint8Array,
variant: 'sha256' | 'sha512' = 'sha256'
) {
return createHmac(variant, key).update(buffer).digest()
}

@@ -127,27 +126,17 @@ export function md5(buffer: Buffer) {
export async function hkdf(
buffer: Uint8Array | Buffer,
expandedLength: number,
info: { salt?: Buffer, info?: string }
info: { salt?: Buffer; info?: string }
): Promise<Buffer> {
// Ensure we have a Uint8Array for the key material
const inputKeyMaterial = buffer instanceof Uint8Array
? buffer
: new Uint8Array(buffer)
const inputKeyMaterial = buffer instanceof Uint8Array ? buffer : new Uint8Array(buffer)

// Set default values if not provided
const salt = info.salt ? new Uint8Array(info.salt) : new Uint8Array(0)
const infoBytes = info.info
? new TextEncoder().encode(info.info)
: new Uint8Array(0)
const infoBytes = info.info ? new TextEncoder().encode(info.info) : new Uint8Array(0)

// Import the input key material
const importedKey = await subtle.importKey(
'raw',
inputKeyMaterial,
{ name: 'HKDF' },
false,
['deriveBits']
)
const importedKey = await subtle.importKey('raw', inputKeyMaterial, { name: 'HKDF' }, false, ['deriveBits'])

// Derive bits using HKDF
const derivedBits = await subtle.deriveBits(
@@ -164,7 +153,6 @@ export async function hkdf(
return Buffer.from(derivedBits)
}
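The reflowed `hkdf` keeps the same WebCrypto behaviour: import the key material, then expand it to `expandedLength` bytes. A usage sketch against the signature shown above; the info string and slice offsets are illustrative only:

```ts
import { randomBytes } from 'crypto'
import { hkdf } from './crypto'

async function deriveExampleKeys() {
	const masterSecret = randomBytes(32)
	// expand into 80 bytes bound to an application-specific info string;
	// omitting `salt` falls back to the zero-length salt in the implementation
	const expanded = await hkdf(masterSecret, 80, { info: 'example-info' })
	return { iv: expanded.subarray(0, 16), cipherKey: expanded.subarray(16, 48) }
}
```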
export async function derivePairingCodeKey(pairingCode: string, salt: Buffer): Promise<Buffer> {
// Convert inputs to formats Web Crypto API can work with
const encoder = new TextEncoder()
@@ -172,13 +160,7 @@ export async function derivePairingCodeKey(pairingCode: string, salt: Buffer): P
const saltBuffer = salt instanceof Uint8Array ? salt : new Uint8Array(salt)

// Import the pairing code as key material
const keyMaterial = await subtle.importKey(
'raw',
pairingCodeBuffer,
{ name: 'PBKDF2' },
false,
['deriveBits']
)
const keyMaterial = await subtle.importKey('raw', pairingCodeBuffer, { name: 'PBKDF2' }, false, ['deriveBits'])

// Derive bits using PBKDF2 with the same parameters
// 2 << 16 = 131,072 iterations
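`derivePairingCodeKey` stretches a short pairing code with PBKDF2 at 2 << 16 = 131,072 iterations, so brute-forcing the low-entropy code stays deliberately slow. A minimal usage sketch; the code value here is made up:

```ts
import { randomBytes } from 'crypto'
import { derivePairingCodeKey } from './crypto'

async function example() {
	const salt = randomBytes(32)
	// the human-readable pairing code is the low-entropy input being stretched
	const linkCodeKey = await derivePairingCodeKey('ABCD1234', salt)
	return linkCodeKey // Buffer of derived key material
}
```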
@@ -1,7 +1,17 @@
import { Boom } from '@hapi/boom'
import { proto } from '../../WAProto'
import { SignalRepository, WAMessageKey } from '../Types'
import { areJidsSameUser, BinaryNode, isJidBroadcast, isJidGroup, isJidMetaIa, isJidNewsletter, isJidStatusBroadcast, isJidUser, isLidUser } from '../WABinary'
import {
areJidsSameUser,
BinaryNode,
isJidBroadcast,
isJidGroup,
isJidMetaIa,
isJidNewsletter,
isJidStatusBroadcast,
isJidUser,
isLidUser
} from '../WABinary'
import { unpadRandomMax16 } from './generics'
import { ILogger } from './logger'

@@ -24,17 +34,20 @@ export const NACK_REASONS = {
DBOperationFailed: 552
}

type MessageType = 'chat' | 'peer_broadcast' | 'other_broadcast' | 'group' | 'direct_peer_status' | 'other_status' | 'newsletter'
type MessageType =
| 'chat'
| 'peer_broadcast'
| 'other_broadcast'
| 'group'
| 'direct_peer_status'
| 'other_status'
| 'newsletter'

/**
* Decode the received node as a message.
* @note this will only parse the message, not decrypt it
*/
export function decodeMessageNode(
stanza: BinaryNode,
meId: string,
meLid: string
) {
export function decodeMessageNode(stanza: BinaryNode, meId: string, meLid: string) {
let msgType: MessageType
let chatId: string
let author: string
@@ -47,9 +60,9 @@ export function decodeMessageNode(
const isMe = (jid: string) => areJidsSameUser(jid, meId)
const isMeLid = (jid: string) => areJidsSameUser(jid, meLid)

if(isJidUser(from) || isLidUser(from)) {
if(recipient && !isJidMetaIa(recipient)) {
if(!isMe(from) && !isMeLid(from)) {
if (isJidUser(from) || isLidUser(from)) {
if (recipient && !isJidMetaIa(recipient)) {
if (!isMe(from) && !isMeLid(from)) {
throw new Boom('receipient present, but msg not from me', { data: stanza })
}

@@ -60,21 +73,21 @@ export function decodeMessageNode(

msgType = 'chat'
author = from
} else if(isJidGroup(from)) {
if(!participant) {
} else if (isJidGroup(from)) {
if (!participant) {
throw new Boom('No participant in group message')
}

msgType = 'group'
author = participant
chatId = from
} else if(isJidBroadcast(from)) {
if(!participant) {
} else if (isJidBroadcast(from)) {
if (!participant) {
throw new Boom('No participant in group message')
}

const isParticipantMe = isMe(participant)
if(isJidStatusBroadcast(from)) {
if (isJidStatusBroadcast(from)) {
msgType = isParticipantMe ? 'direct_peer_status' : 'other_status'
} else {
msgType = isParticipantMe ? 'peer_broadcast' : 'other_broadcast'
@@ -82,7 +95,7 @@ export function decodeMessageNode(

chatId = from
author = participant
} else if(isJidNewsletter(from)) {
} else if (isJidNewsletter(from)) {
msgType = 'newsletter'
chatId = from
author = from
@@ -107,7 +120,7 @@ export function decodeMessageNode(
broadcast: isJidBroadcast(from)
}

if(key.fromMe) {
if (key.fromMe) {
fullMessage.status = proto.WebMessageInfo.Status.SERVER_ACK
}
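A sketch of the classification rules in the branches above: a `@g.us` sender carrying a `participant` attribute yields a group message whose author is that participant. The stanza literal is illustrative, not a real wire frame, and the return shape is abbreviated:

```ts
import { decodeMessageNode } from './decode-wa-message'
import type { BinaryNode } from '../WABinary'

const stanza: BinaryNode = {
	tag: 'message',
	attrs: {
		from: '123456789-123345@g.us',           // group JID
		participant: '15550001111@s.whatsapp.net', // actual author
		id: '3EB0ABCDEF',
		t: '1700000000'
	},
	content: []
}

// msgType becomes 'group', author is the participant JID
const decoded = decodeMessageNode(stanza, '15552223333@s.whatsapp.net', '123@lid')
```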
@@ -132,19 +145,19 @@ export const decryptMessageNode = (
author,
async decrypt() {
let decryptables = 0
if(Array.isArray(stanza.content)) {
for(const { tag, attrs, content } of stanza.content) {
if(tag === 'verified_name' && content instanceof Uint8Array) {
if (Array.isArray(stanza.content)) {
for (const { tag, attrs, content } of stanza.content) {
if (tag === 'verified_name' && content instanceof Uint8Array) {
const cert = proto.VerifiedNameCertificate.decode(content)
const details = proto.VerifiedNameCertificate.Details.decode(cert.details!)
fullMessage.verifiedBizName = details.verifiedName
}

if(tag !== 'enc' && tag !== 'plaintext') {
if (tag !== 'enc' && tag !== 'plaintext') {
continue
}

if(!(content instanceof Uint8Array)) {
if (!(content instanceof Uint8Array)) {
continue
}

@@ -155,53 +168,52 @@ export const decryptMessageNode = (
try {
const e2eType = tag === 'plaintext' ? 'plaintext' : attrs.type
switch (e2eType) {
case 'skmsg':
msgBuffer = await repository.decryptGroupMessage({
group: sender,
authorJid: author,
msg: content
})
break
case 'pkmsg':
case 'msg':
const user = isJidUser(sender) ? sender : author
msgBuffer = await repository.decryptMessage({
jid: user,
type: e2eType,
ciphertext: content
})
break
case 'plaintext':
msgBuffer = content
break
default:
throw new Error(`Unknown e2e type: ${e2eType}`)
case 'skmsg':
msgBuffer = await repository.decryptGroupMessage({
group: sender,
authorJid: author,
msg: content
})
break
case 'pkmsg':
case 'msg':
const user = isJidUser(sender) ? sender : author
msgBuffer = await repository.decryptMessage({
jid: user,
type: e2eType,
ciphertext: content
})
break
case 'plaintext':
msgBuffer = content
break
default:
throw new Error(`Unknown e2e type: ${e2eType}`)
}

let msg: proto.IMessage = proto.Message.decode(e2eType !== 'plaintext' ? unpadRandomMax16(msgBuffer) : msgBuffer)
let msg: proto.IMessage = proto.Message.decode(
e2eType !== 'plaintext' ? unpadRandomMax16(msgBuffer) : msgBuffer
)
msg = msg.deviceSentMessage?.message || msg
if(msg.senderKeyDistributionMessage) {
if (msg.senderKeyDistributionMessage) {
//eslint-disable-next-line max-depth
try {
try {
await repository.processSenderKeyDistributionMessage({
authorJid: author,
item: msg.senderKeyDistributionMessage
})
} catch(err) {
} catch (err) {
logger.error({ key: fullMessage.key, err }, 'failed to decrypt message')
}
}
}

if(fullMessage.message) {
if (fullMessage.message) {
Object.assign(fullMessage.message, msg)
} else {
fullMessage.message = msg
}
} catch(err) {
logger.error(
{ key: fullMessage.key, err },
'failed to decrypt message'
)
} catch (err) {
logger.error({ key: fullMessage.key, err }, 'failed to decrypt message')
fullMessage.messageStubType = proto.WebMessageInfo.StubType.CIPHERTEXT
fullMessage.messageStubParameters = [err.message]
}
@@ -209,7 +221,7 @@ export const decryptMessageNode = (
}

// if nothing was found to decrypt
if(!decryptables) {
if (!decryptables) {
fullMessage.messageStubType = proto.WebMessageInfo.StubType.CIPHERTEXT
fullMessage.messageStubParameters = [NO_MESSAGE_FOUND_ERROR_TEXT]
}
@@ -1,6 +1,16 @@
import EventEmitter from 'events'
import { proto } from '../../WAProto'
import { BaileysEvent, BaileysEventEmitter, BaileysEventMap, BufferedEventData, Chat, ChatUpdate, Contact, WAMessage, WAMessageStatus } from '../Types'
import {
BaileysEvent,
BaileysEventEmitter,
BaileysEventMap,
BufferedEventData,
Chat,
ChatUpdate,
Contact,
WAMessage,
WAMessageStatus
} from '../Types'
import { trimUndefined } from './generics'
import { ILogger } from './logger'
import { updateMessageWithReaction, updateMessageWithReceipt } from './messages'
@@ -18,10 +28,10 @@ const BUFFERABLE_EVENT = [
'messages.delete',
'messages.reaction',
'message-receipt.update',
'groups.update',
'groups.update'
] as const

type BufferableEvent = typeof BUFFERABLE_EVENT[number]
type BufferableEvent = (typeof BUFFERABLE_EVENT)[number]

/**
* A map that contains a list of all events that have been triggered
@@ -36,14 +46,14 @@ const BUFFERABLE_EVENT_SET = new Set<BaileysEvent>(BUFFERABLE_EVENT)

type BaileysBufferableEventEmitter = BaileysEventEmitter & {
/** Use to process events in a batch */
process(handler: (events: BaileysEventData) => void | Promise<void>): (() => void)
process(handler: (events: BaileysEventData) => void | Promise<void>): () => void
/**
* starts buffering events, call flush() to release them
* */
buffer(): void
/** buffers all events till the promise completes */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
createBufferedFunction<A extends any[], T>(work: (...args: A) => Promise<T>): ((...args: A) => Promise<T>)
createBufferedFunction<A extends any[], T>(work: (...args: A) => Promise<T>): (...args: A) => Promise<T>
/**
* flushes all buffered events
* @param force if true, will flush all data regardless of any pending buffers
@@ -68,7 +78,7 @@ export const makeEventBuffer = (logger: ILogger): BaileysBufferableEventEmitter
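`createBufferedFunction` opens the buffer before the wrapped work runs and flushes once it settles, which is how history-sync processing keeps its event storm to a single batch. A sketch; the logger stub is assumed to be supplied by the host application:

```ts
import { makeEventBuffer } from './event-buffer'
import type { ILogger } from './logger'

declare const logger: ILogger // provided by the embedding application

const ev = makeEventBuffer(logger)

// events emitted inside the wrapped function are buffered and
// released as one consolidated batch once the promise settles
const processChunks = ev.createBufferedFunction(async (chunks: string[]) => {
	for (const chunk of chunks) {
		// ... emit chats.upsert / messages.upsert etc. while processing
	}
})
```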
// take the generic event and fire it as a baileys event
ev.on('event', (map: BaileysEventData) => {
for(const event in map) {
for (const event in map) {
ev.emit(event, map[event])
}
})
@@ -79,16 +89,16 @@ export const makeEventBuffer = (logger: ILogger): BaileysBufferableEventEmitter

function flush(force = false) {
// no buffer going on
if(!buffersInProgress) {
if (!buffersInProgress) {
return false
}

if(!force) {
if (!force) {
// reduce the number of buffers in progress
buffersInProgress -= 1
// if there are still some buffers going on
// then we don't flush now
if(buffersInProgress) {
if (buffersInProgress) {
return false
}
}
@@ -97,8 +107,8 @@ export const makeEventBuffer = (logger: ILogger): BaileysBufferableEventEmitter
const chatUpdates = Object.values(data.chatUpdates)
// gather the remaining conditional events so we re-queue them
let conditionalChatUpdatesLeft = 0
for(const update of chatUpdates) {
if(update.conditional) {
for (const update of chatUpdates) {
if (update.conditional) {
conditionalChatUpdatesLeft += 1
newData.chatUpdates[update.id!] = update
delete data.chatUpdates[update.id!]
@@ -106,16 +116,13 @@ export const makeEventBuffer = (logger: ILogger): BaileysBufferableEventEmitter
}

const consolidatedData = consolidateEvents(data)
if(Object.keys(consolidatedData).length) {
if (Object.keys(consolidatedData).length) {
ev.emit('event', consolidatedData)
}

data = newData

logger.trace(
{ conditionalChatUpdatesLeft },
'released buffered events'
)
logger.trace({ conditionalChatUpdatesLeft }, 'released buffered events')

return true
}
@@ -132,7 +139,7 @@ export const makeEventBuffer = (logger: ILogger): BaileysBufferableEventEmitter
}
},
emit<T extends BaileysEvent>(event: BaileysEvent, evData: BaileysEventMap[T]) {
if(buffersInProgress && BUFFERABLE_EVENT_SET.has(event)) {
if (buffersInProgress && BUFFERABLE_EVENT_SET.has(event)) {
append(data, historyCache, event as BufferableEvent, evData, logger)
return true
}
@@ -145,7 +152,7 @@ export const makeEventBuffer = (logger: ILogger): BaileysBufferableEventEmitter
buffer,
flush,
createBufferedFunction(work) {
return async(...args) => {
return async (...args) => {
buffer()
try {
const result = await work(...args)
@@ -157,30 +164,30 @@ export const makeEventBuffer = (logger: ILogger): BaileysBufferableEventEmitter
},
on: (...args) => ev.on(...args),
off: (...args) => ev.off(...args),
removeAllListeners: (...args) => ev.removeAllListeners(...args),
removeAllListeners: (...args) => ev.removeAllListeners(...args)
}
}

const makeBufferData = (): BufferedEventData => {
return {
historySets: {
chats: { },
messages: { },
contacts: { },
chats: {},
messages: {},
contacts: {},
isLatest: false,
empty: true
},
chatUpserts: { },
chatUpdates: { },
chatUpserts: {},
chatUpdates: {},
chatDeletes: new Set(),
contactUpserts: { },
contactUpdates: { },
messageUpserts: { },
messageUpdates: { },
messageReactions: { },
messageDeletes: { },
messageReceipts: { },
groupUpdates: { }
contactUpserts: {},
contactUpdates: {},
messageUpserts: {},
messageUpdates: {},
messageReactions: {},
messageDeletes: {},
messageReceipts: {},
groupUpdates: {}
}
}
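Since `buffersInProgress` is a counter rather than a flag, nested `buffer()` calls only release on the outermost `flush()`, and `flush(true)` overrides the nesting. Roughly:

```ts
declare const ev: ReturnType<typeof makeEventBuffer>
declare const chat: Chat

ev.buffer() // buffersInProgress = 1
ev.buffer() // buffersInProgress = 2
ev.emit('chats.upsert', [chat])

ev.flush() // false: an outer buffer is still open, nothing released
ev.flush() // true: consolidated events emitted now
// ev.flush(true) would have released everything regardless of nesting
```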
@@ -193,305 +200,298 @@ function append<E extends BufferableEvent>(
logger: ILogger
) {
switch (event) {
case 'messaging-history.set':
for(const chat of eventData.chats as Chat[]) {
const existingChat = data.historySets.chats[chat.id]
if(existingChat) {
existingChat.endOfHistoryTransferType = chat.endOfHistoryTransferType
}

if(!existingChat && !historyCache.has(chat.id)) {
data.historySets.chats[chat.id] = chat
historyCache.add(chat.id)

absorbingChatUpdate(chat)
}
}

for(const contact of eventData.contacts as Contact[]) {
const existingContact = data.historySets.contacts[contact.id]
if(existingContact) {
Object.assign(existingContact, trimUndefined(contact))
} else {
const historyContactId = `c:${contact.id}`
const hasAnyName = contact.notify || contact.name || contact.verifiedName
if(!historyCache.has(historyContactId) || hasAnyName) {
data.historySets.contacts[contact.id] = contact
historyCache.add(historyContactId)
case 'messaging-history.set':
for (const chat of eventData.chats as Chat[]) {
const existingChat = data.historySets.chats[chat.id]
if (existingChat) {
existingChat.endOfHistoryTransferType = chat.endOfHistoryTransferType
}
}
}

for(const message of eventData.messages as WAMessage[]) {
const key = stringifyMessageKey(message.key)
const existingMsg = data.historySets.messages[key]
if(!existingMsg && !historyCache.has(key)) {
data.historySets.messages[key] = message
historyCache.add(key)
}
}
if (!existingChat && !historyCache.has(chat.id)) {
data.historySets.chats[chat.id] = chat
historyCache.add(chat.id)

data.historySets.empty = false
data.historySets.syncType = eventData.syncType
data.historySets.progress = eventData.progress
data.historySets.peerDataRequestSessionId = eventData.peerDataRequestSessionId
data.historySets.isLatest = eventData.isLatest || data.historySets.isLatest

break
case 'chats.upsert':
for(const chat of eventData as Chat[]) {
let upsert = data.chatUpserts[chat.id]
if(!upsert) {
upsert = data.historySets[chat.id]
if(upsert) {
logger.debug({ chatId: chat.id }, 'absorbed chat upsert in chat set')
absorbingChatUpdate(chat)
}
}

if(upsert) {
upsert = concatChats(upsert, chat)
} else {
upsert = chat
data.chatUpserts[chat.id] = upsert
}

absorbingChatUpdate(upsert)

if(data.chatDeletes.has(chat.id)) {
data.chatDeletes.delete(chat.id)
}
}

break
case 'chats.update':
for(const update of eventData as ChatUpdate[]) {
const chatId = update.id!
const conditionMatches = update.conditional ? update.conditional(data) : true
if(conditionMatches) {
delete update.conditional

// if there is an existing upsert, merge the update into it
const upsert = data.historySets.chats[chatId] || data.chatUpserts[chatId]
if(upsert) {
concatChats(upsert, update)
for (const contact of eventData.contacts as Contact[]) {
const existingContact = data.historySets.contacts[contact.id]
if (existingContact) {
Object.assign(existingContact, trimUndefined(contact))
} else {
// merge the update into the existing update
const chatUpdate = data.chatUpdates[chatId] || { }
data.chatUpdates[chatId] = concatChats(chatUpdate, update)
}
} else if(conditionMatches === undefined) {
// condition yet to be fulfilled
data.chatUpdates[chatId] = update
}
// otherwise -- condition not met, update is invalid

// if the chat has been updated
// ignore any existing chat delete
if(data.chatDeletes.has(chatId)) {
data.chatDeletes.delete(chatId)
}
}

break
case 'chats.delete':
for(const chatId of eventData as string[]) {
if(!data.chatDeletes.has(chatId)) {
data.chatDeletes.add(chatId)
}

// remove any prior updates & upserts
if(data.chatUpdates[chatId]) {
delete data.chatUpdates[chatId]
}

if(data.chatUpserts[chatId]) {
delete data.chatUpserts[chatId]

}

if(data.historySets.chats[chatId]) {
delete data.historySets.chats[chatId]
}
}

break
case 'contacts.upsert':
for(const contact of eventData as Contact[]) {
let upsert = data.contactUpserts[contact.id]
if(!upsert) {
upsert = data.historySets.contacts[contact.id]
if(upsert) {
logger.debug({ contactId: contact.id }, 'absorbed contact upsert in contact set')
const historyContactId = `c:${contact.id}`
const hasAnyName = contact.notify || contact.name || contact.verifiedName
if (!historyCache.has(historyContactId) || hasAnyName) {
data.historySets.contacts[contact.id] = contact
historyCache.add(historyContactId)
}
}
}

if(upsert) {
upsert = Object.assign(upsert, trimUndefined(contact))
} else {
upsert = contact
data.contactUpserts[contact.id] = upsert
}

if(data.contactUpdates[contact.id]) {
upsert = Object.assign(data.contactUpdates[contact.id], trimUndefined(contact)) as Contact
delete data.contactUpdates[contact.id]
}
}

break
case 'contacts.update':
const contactUpdates = eventData as BaileysEventMap['contacts.update']
for(const update of contactUpdates) {
const id = update.id!
// merge into prior upsert
const upsert = data.historySets.contacts[id] || data.contactUpserts[id]
if(upsert) {
Object.assign(upsert, update)
} else {
// merge into prior update
const contactUpdate = data.contactUpdates[id] || { }
data.contactUpdates[id] = Object.assign(contactUpdate, update)
}
}

break
case 'messages.upsert':
const { messages, type } = eventData as BaileysEventMap['messages.upsert']
for(const message of messages) {
const key = stringifyMessageKey(message.key)
let existing = data.messageUpserts[key]?.message
if(!existing) {
existing = data.historySets.messages[key]
if(existing) {
logger.debug({ messageId: key }, 'absorbed message upsert in message set')
for (const message of eventData.messages as WAMessage[]) {
const key = stringifyMessageKey(message.key)
const existingMsg = data.historySets.messages[key]
if (!existingMsg && !historyCache.has(key)) {
data.historySets.messages[key] = message
historyCache.add(key)
}
}

if(existing) {
message.messageTimestamp = existing.messageTimestamp
}
data.historySets.empty = false
data.historySets.syncType = eventData.syncType
data.historySets.progress = eventData.progress
data.historySets.peerDataRequestSessionId = eventData.peerDataRequestSessionId
data.historySets.isLatest = eventData.isLatest || data.historySets.isLatest

if(data.messageUpdates[key]) {
logger.debug('absorbed prior message update in message upsert')
Object.assign(message, data.messageUpdates[key].update)
delete data.messageUpdates[key]
}
break
case 'chats.upsert':
for (const chat of eventData as Chat[]) {
let upsert = data.chatUpserts[chat.id]
if (!upsert) {
upsert = data.historySets[chat.id]
if (upsert) {
logger.debug({ chatId: chat.id }, 'absorbed chat upsert in chat set')
}
}

if(data.historySets.messages[key]) {
data.historySets.messages[key] = message
} else {
data.messageUpserts[key] = {
message,
type: type === 'notify' || data.messageUpserts[key]?.type === 'notify'
? 'notify'
: type
if (upsert) {
upsert = concatChats(upsert, chat)
} else {
upsert = chat
data.chatUpserts[chat.id] = upsert
}

absorbingChatUpdate(upsert)

if (data.chatDeletes.has(chat.id)) {
data.chatDeletes.delete(chat.id)
}
}
}

break
case 'messages.update':
const msgUpdates = eventData as BaileysEventMap['messages.update']
for(const { key, update } of msgUpdates) {
const keyStr = stringifyMessageKey(key)
const existing = data.historySets.messages[keyStr] || data.messageUpserts[keyStr]?.message
if(existing) {
Object.assign(existing, update)
// if the message was received & read by us
// the chat counter must have been incremented
// so we need to decrement it
if(update.status === WAMessageStatus.READ && !key.fromMe) {
decrementChatReadCounterIfMsgDidUnread(existing)
break
case 'chats.update':
for (const update of eventData as ChatUpdate[]) {
const chatId = update.id!
const conditionMatches = update.conditional ? update.conditional(data) : true
if (conditionMatches) {
delete update.conditional

// if there is an existing upsert, merge the update into it
const upsert = data.historySets.chats[chatId] || data.chatUpserts[chatId]
if (upsert) {
concatChats(upsert, update)
} else {
// merge the update into the existing update
const chatUpdate = data.chatUpdates[chatId] || {}
data.chatUpdates[chatId] = concatChats(chatUpdate, update)
}
} else if (conditionMatches === undefined) {
// condition yet to be fulfilled
data.chatUpdates[chatId] = update
}
} else {
const msgUpdate = data.messageUpdates[keyStr] || { key, update: { } }
Object.assign(msgUpdate.update, update)
data.messageUpdates[keyStr] = msgUpdate
}
}
// otherwise -- condition not met, update is invalid

break
case 'messages.delete':
const deleteData = eventData as BaileysEventMap['messages.delete']
if('keys' in deleteData) {
const { keys } = deleteData
for(const key of keys) {
// if the chat has been updated
// ignore any existing chat delete
if (data.chatDeletes.has(chatId)) {
data.chatDeletes.delete(chatId)
}
}

break
case 'chats.delete':
for (const chatId of eventData as string[]) {
if (!data.chatDeletes.has(chatId)) {
data.chatDeletes.add(chatId)
}

// remove any prior updates & upserts
if (data.chatUpdates[chatId]) {
delete data.chatUpdates[chatId]
}

if (data.chatUpserts[chatId]) {
delete data.chatUpserts[chatId]
}

if (data.historySets.chats[chatId]) {
delete data.historySets.chats[chatId]
}
}

break
case 'contacts.upsert':
for (const contact of eventData as Contact[]) {
let upsert = data.contactUpserts[contact.id]
if (!upsert) {
upsert = data.historySets.contacts[contact.id]
if (upsert) {
logger.debug({ contactId: contact.id }, 'absorbed contact upsert in contact set')
}
}

if (upsert) {
upsert = Object.assign(upsert, trimUndefined(contact))
} else {
upsert = contact
data.contactUpserts[contact.id] = upsert
}

if (data.contactUpdates[contact.id]) {
upsert = Object.assign(data.contactUpdates[contact.id], trimUndefined(contact)) as Contact
delete data.contactUpdates[contact.id]
}
}

break
case 'contacts.update':
const contactUpdates = eventData as BaileysEventMap['contacts.update']
for (const update of contactUpdates) {
const id = update.id!
// merge into prior upsert
const upsert = data.historySets.contacts[id] || data.contactUpserts[id]
if (upsert) {
Object.assign(upsert, update)
} else {
// merge into prior update
const contactUpdate = data.contactUpdates[id] || {}
data.contactUpdates[id] = Object.assign(contactUpdate, update)
}
}

break
case 'messages.upsert':
const { messages, type } = eventData as BaileysEventMap['messages.upsert']
for (const message of messages) {
const key = stringifyMessageKey(message.key)
let existing = data.messageUpserts[key]?.message
if (!existing) {
existing = data.historySets.messages[key]
if (existing) {
logger.debug({ messageId: key }, 'absorbed message upsert in message set')
}
}

if (existing) {
message.messageTimestamp = existing.messageTimestamp
}

if (data.messageUpdates[key]) {
logger.debug('absorbed prior message update in message upsert')
Object.assign(message, data.messageUpdates[key].update)
delete data.messageUpdates[key]
}

if (data.historySets.messages[key]) {
data.historySets.messages[key] = message
} else {
data.messageUpserts[key] = {
message,
type: type === 'notify' || data.messageUpserts[key]?.type === 'notify' ? 'notify' : type
}
}
}

break
case 'messages.update':
const msgUpdates = eventData as BaileysEventMap['messages.update']
for (const { key, update } of msgUpdates) {
const keyStr = stringifyMessageKey(key)
if(!data.messageDeletes[keyStr]) {
data.messageDeletes[keyStr] = key

}

if(data.messageUpserts[keyStr]) {
delete data.messageUpserts[keyStr]
}

if(data.messageUpdates[keyStr]) {
delete data.messageUpdates[keyStr]
const existing = data.historySets.messages[keyStr] || data.messageUpserts[keyStr]?.message
if (existing) {
Object.assign(existing, update)
// if the message was received & read by us
// the chat counter must have been incremented
// so we need to decrement it
if (update.status === WAMessageStatus.READ && !key.fromMe) {
decrementChatReadCounterIfMsgDidUnread(existing)
}
} else {
const msgUpdate = data.messageUpdates[keyStr] || { key, update: {} }
Object.assign(msgUpdate.update, update)
data.messageUpdates[keyStr] = msgUpdate
}
}
} else {
// TODO: add support
}

break
case 'messages.reaction':
const reactions = eventData as BaileysEventMap['messages.reaction']
for(const { key, reaction } of reactions) {
const keyStr = stringifyMessageKey(key)
const existing = data.messageUpserts[keyStr]
if(existing) {
updateMessageWithReaction(existing.message, reaction)
break
case 'messages.delete':
const deleteData = eventData as BaileysEventMap['messages.delete']
if ('keys' in deleteData) {
const { keys } = deleteData
for (const key of keys) {
const keyStr = stringifyMessageKey(key)
if (!data.messageDeletes[keyStr]) {
data.messageDeletes[keyStr] = key
}

if (data.messageUpserts[keyStr]) {
delete data.messageUpserts[keyStr]
}

if (data.messageUpdates[keyStr]) {
delete data.messageUpdates[keyStr]
}
}
} else {
data.messageReactions[keyStr] = data.messageReactions[keyStr]
|| { key, reactions: [] }
updateMessageWithReaction(data.messageReactions[keyStr], reaction)
// TODO: add support
}
}

break
case 'message-receipt.update':
const receipts = eventData as BaileysEventMap['message-receipt.update']
for(const { key, receipt } of receipts) {
const keyStr = stringifyMessageKey(key)
const existing = data.messageUpserts[keyStr]
if(existing) {
updateMessageWithReceipt(existing.message, receipt)
} else {
data.messageReceipts[keyStr] = data.messageReceipts[keyStr]
|| { key, userReceipt: [] }
updateMessageWithReceipt(data.messageReceipts[keyStr], receipt)
break
case 'messages.reaction':
const reactions = eventData as BaileysEventMap['messages.reaction']
for (const { key, reaction } of reactions) {
const keyStr = stringifyMessageKey(key)
const existing = data.messageUpserts[keyStr]
if (existing) {
updateMessageWithReaction(existing.message, reaction)
} else {
data.messageReactions[keyStr] = data.messageReactions[keyStr] || { key, reactions: [] }
updateMessageWithReaction(data.messageReactions[keyStr], reaction)
}
}
}

break
case 'groups.update':
const groupUpdates = eventData as BaileysEventMap['groups.update']
for(const update of groupUpdates) {
const id = update.id!
const groupUpdate = data.groupUpdates[id] || { }
if(!data.groupUpdates[id]) {
data.groupUpdates[id] = Object.assign(groupUpdate, update)

break
case 'message-receipt.update':
const receipts = eventData as BaileysEventMap['message-receipt.update']
for (const { key, receipt } of receipts) {
const keyStr = stringifyMessageKey(key)
const existing = data.messageUpserts[keyStr]
if (existing) {
updateMessageWithReceipt(existing.message, receipt)
} else {
data.messageReceipts[keyStr] = data.messageReceipts[keyStr] || { key, userReceipt: [] }
updateMessageWithReceipt(data.messageReceipts[keyStr], receipt)
}
}
}

break
default:
throw new Error(`"${event}" cannot be buffered`)
break
case 'groups.update':
const groupUpdates = eventData as BaileysEventMap['groups.update']
for (const update of groupUpdates) {
const id = update.id!
const groupUpdate = data.groupUpdates[id] || {}
if (!data.groupUpdates[id]) {
data.groupUpdates[id] = Object.assign(groupUpdate, update)
}
}

break
default:
throw new Error(`"${event}" cannot be buffered`)
}
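The practical effect of this switch is that later events are folded into earlier buffered ones instead of being queued twice. For instance, an update to a message upserted in the same buffer window mutates the pending upsert:

```ts
declare const ev: ReturnType<typeof makeEventBuffer>
declare const msg: WAMessage

ev.buffer()
ev.emit('messages.upsert', { messages: [msg], type: 'notify' })
// absorbed into the pending upsert rather than queued as a second event
ev.emit('messages.update', [{ key: msg.key, update: { status: WAMessageStatus.READ } }])
ev.flush()
// subscribers see a single messages.upsert whose message already carries READ
```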
function absorbingChatUpdate(existing: Chat) {
const chatId = existing.id
const update = data.chatUpdates[chatId]
if(update) {
if (update) {
const conditionMatches = update.conditional ? update.conditional(data) : true
if(conditionMatches) {
if (conditionMatches) {
delete update.conditional
logger.debug({ chatId }, 'absorbed chat update in existing chat')
Object.assign(existing, concatChats(update as Chat, existing))
delete data.chatUpdates[chatId]
} else if(conditionMatches === false) {
} else if (conditionMatches === false) {
logger.debug({ chatId }, 'chat update condition fail, removing')
delete data.chatUpdates[chatId]
}
@@ -503,15 +503,15 @@ function append<E extends BufferableEvent>(
// if the message has already been marked read by us
const chatId = message.key.remoteJid!
const chat = data.chatUpdates[chatId] || data.chatUpserts[chatId]
if(
isRealMessage(message, '')
&& shouldIncrementChatUnread(message)
&& typeof chat?.unreadCount === 'number'
&& chat.unreadCount > 0
if (
isRealMessage(message, '') &&
shouldIncrementChatUnread(message) &&
typeof chat?.unreadCount === 'number' &&
chat.unreadCount > 0
) {
logger.debug({ chatId: chat.id }, 'decrementing chat counter')
chat.unreadCount -= 1
if(chat.unreadCount === 0) {
if (chat.unreadCount === 0) {
delete chat.unreadCount
}
}
@@ -519,9 +519,9 @@ function append<E extends BufferableEvent>(
}

function consolidateEvents(data: BufferedEventData) {
const map: BaileysEventData = { }
const map: BaileysEventData = {}

if(!data.historySets.empty) {
if (!data.historySets.empty) {
map['messaging-history.set'] = {
chats: Object.values(data.historySets.chats),
messages: Object.values(data.historySets.messages),
@@ -534,22 +534,22 @@ function consolidateEvents(data: BufferedEventData) {
}

const chatUpsertList = Object.values(data.chatUpserts)
if(chatUpsertList.length) {
if (chatUpsertList.length) {
map['chats.upsert'] = chatUpsertList
}

const chatUpdateList = Object.values(data.chatUpdates)
if(chatUpdateList.length) {
if (chatUpdateList.length) {
map['chats.update'] = chatUpdateList
}

const chatDeleteList = Array.from(data.chatDeletes)
if(chatDeleteList.length) {
if (chatDeleteList.length) {
map['chats.delete'] = chatDeleteList
}

const messageUpsertList = Object.values(data.messageUpserts)
if(messageUpsertList.length) {
if (messageUpsertList.length) {
const type = messageUpsertList[0].type
map['messages.upsert'] = {
messages: messageUpsertList.map(m => m.message),
@@ -558,41 +558,41 @@ function consolidateEvents(data: BufferedEventData) {
}

const messageUpdateList = Object.values(data.messageUpdates)
if(messageUpdateList.length) {
if (messageUpdateList.length) {
map['messages.update'] = messageUpdateList
}

const messageDeleteList = Object.values(data.messageDeletes)
if(messageDeleteList.length) {
if (messageDeleteList.length) {
map['messages.delete'] = { keys: messageDeleteList }
}

const messageReactionList = Object.values(data.messageReactions).flatMap(
({ key, reactions }) => reactions.flatMap(reaction => ({ key, reaction }))
const messageReactionList = Object.values(data.messageReactions).flatMap(({ key, reactions }) =>
reactions.flatMap(reaction => ({ key, reaction }))
)
if(messageReactionList.length) {
if (messageReactionList.length) {
map['messages.reaction'] = messageReactionList
}

const messageReceiptList = Object.values(data.messageReceipts).flatMap(
({ key, userReceipt }) => userReceipt.flatMap(receipt => ({ key, receipt }))
const messageReceiptList = Object.values(data.messageReceipts).flatMap(({ key, userReceipt }) =>
userReceipt.flatMap(receipt => ({ key, receipt }))
)
if(messageReceiptList.length) {
if (messageReceiptList.length) {
map['message-receipt.update'] = messageReceiptList
}

const contactUpsertList = Object.values(data.contactUpserts)
if(contactUpsertList.length) {
if (contactUpsertList.length) {
map['contacts.upsert'] = contactUpsertList
}

const contactUpdateList = Object.values(data.contactUpdates)
if(contactUpdateList.length) {
if (contactUpdateList.length) {
map['contacts.update'] = contactUpdateList
}

const groupUpdateList = Object.values(data.groupUpdates)
if(groupUpdateList.length) {
if (groupUpdateList.length) {
map['groups.update'] = groupUpdateList
}

@@ -600,15 +600,17 @@ function consolidateEvents(data: BufferedEventData) {
}

function concatChats<C extends Partial<Chat>>(a: C, b: Partial<Chat>) {
if(b.unreadCount === null && // neutralize unread counter
a.unreadCount! < 0) {
if (
b.unreadCount === null && // neutralize unread counter
a.unreadCount! < 0
) {
a.unreadCount = undefined
b.unreadCount = undefined
}

if(typeof a.unreadCount === 'number' && typeof b.unreadCount === 'number') {
if (typeof a.unreadCount === 'number' && typeof b.unreadCount === 'number') {
b = { ...b }
if(b.unreadCount! >= 0) {
if (b.unreadCount! >= 0) {
b.unreadCount = Math.max(b.unreadCount!, 0) + Math.max(a.unreadCount, 0)
}
}
@@ -616,4 +618,4 @@ function concatChats<C extends Partial<Chat>>(a: C, b: Partial<Chat>) {
return Object.assign(a, b)
}

const stringifyMessageKey = (key: proto.IMessageKey) => `${key.remoteJid},${key.id},${key.fromMe ? '1' : '0'}`
const stringifyMessageKey = (key: proto.IMessageKey) => `${key.remoteJid},${key.id},${key.fromMe ? '1' : '0'}`
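`concatChats` clamps both unread counters at zero before summing, and treats a negative count in `a` plus an explicit `null` in `b` as a signal to neutralize the counter entirely. Two worked cases (the helper is module-private, so this is behavioural, not an API example):

```ts
// pending upsert has 3 unread, incoming update adds 2 more
concatChats({ id: 'x', unreadCount: 3 }, { unreadCount: 2 }) // -> unreadCount: 5

// a decrement that raced below zero, then a null from the server: reset
concatChats({ id: 'x', unreadCount: -1 }, { unreadCount: null }) // -> unreadCount: undefined
```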
@@ -4,26 +4,34 @@ import { createHash, randomBytes } from 'crypto'
import { platform, release } from 'os'
import { proto } from '../../WAProto'
import { version as baileysVersion } from '../Defaults/baileys-version.json'
import { BaileysEventEmitter, BaileysEventMap, BrowsersMap, ConnectionState, DisconnectReason, WACallUpdateType, WAVersion } from '../Types'
import {
BaileysEventEmitter,
BaileysEventMap,
BrowsersMap,
ConnectionState,
DisconnectReason,
WACallUpdateType,
WAVersion
} from '../Types'
import { BinaryNode, getAllBinaryNodeChildren, jidDecode } from '../WABinary'

const PLATFORM_MAP = {
'aix': 'AIX',
'darwin': 'Mac OS',
'win32': 'Windows',
'android': 'Android',
'freebsd': 'FreeBSD',
'openbsd': 'OpenBSD',
'sunos': 'Solaris'
aix: 'AIX',
darwin: 'Mac OS',
win32: 'Windows',
android: 'Android',
freebsd: 'FreeBSD',
openbsd: 'OpenBSD',
sunos: 'Solaris'
}

export const Browsers: BrowsersMap = {
ubuntu: (browser) => ['Ubuntu', browser, '22.04.4'],
macOS: (browser) => ['Mac OS', browser, '14.4.1'],
baileys: (browser) => ['Baileys', browser, '6.5.0'],
windows: (browser) => ['Windows', browser, '10.0.22631'],
ubuntu: browser => ['Ubuntu', browser, '22.04.4'],
macOS: browser => ['Mac OS', browser, '14.4.1'],
baileys: browser => ['Baileys', browser, '6.5.0'],
windows: browser => ['Windows', browser, '10.0.22631'],
/** The appropriate browser based on your OS & release */
appropriate: (browser) => [ PLATFORM_MAP[platform()] || 'Ubuntu', browser, release() ]
appropriate: browser => [PLATFORM_MAP[platform()] || 'Ubuntu', browser, release()]
}

export const getPlatformId = (browser: string) => {
@@ -34,7 +42,7 @@ export const getPlatformId = (browser: string) => {
export const BufferJSON = {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
replacer: (k, value: any) => {
if(Buffer.isBuffer(value) || value instanceof Uint8Array || value?.type === 'Buffer') {
if (Buffer.isBuffer(value) || value instanceof Uint8Array || value?.type === 'Buffer') {
return { type: 'Buffer', data: Buffer.from(value?.data || value).toString('base64') }
}

@@ -43,7 +51,7 @@ export const BufferJSON = {

// eslint-disable-next-line @typescript-eslint/no-explicit-any
reviver: (_, value: any) => {
if(typeof value === 'object' && !!value && (value.buffer === true || value.type === 'Buffer')) {
if (typeof value === 'object' && !!value && (value.buffer === true || value.type === 'Buffer')) {
const val = value.data || value.value
return typeof val === 'string' ? Buffer.from(val, 'base64') : Buffer.from(val || [])
}
@@ -52,17 +60,13 @@ export const BufferJSON = {
}
}
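`BufferJSON.replacer`/`reviver` exist so that auth state containing `Buffer`s survives a JSON round trip, which is how Baileys credentials are usually persisted. A round trip:

```ts
import { BufferJSON } from './generics'

const creds = { noiseKey: Buffer.from('0a0b0c', 'hex') }
const json = JSON.stringify(creds, BufferJSON.replacer)
// -> {"noiseKey":{"type":"Buffer","data":"CgsM"}}
const restored = JSON.parse(json, BufferJSON.reviver)
Buffer.isBuffer(restored.noiseKey) // true
```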
export const getKeyAuthor = (
key: proto.IMessageKey | undefined | null,
meId = 'me'
) => (
export const getKeyAuthor = (key: proto.IMessageKey | undefined | null, meId = 'me') =>
(key?.fromMe ? meId : key?.participant || key?.remoteJid) || ''
)

export const writeRandomPadMax16 = (msg: Uint8Array) => {
const pad = randomBytes(1)
pad[0] &= 0xf
if(!pad[0]) {
if (!pad[0]) {
pad[0] = 0xf
}

@@ -71,23 +75,19 @@ export const writeRandomPadMax16 = (msg: Uint8Array) => {

export const unpadRandomMax16 = (e: Uint8Array | Buffer) => {
const t = new Uint8Array(e)
if(0 === t.length) {
if (0 === t.length) {
throw new Error('unpadPkcs7 given empty bytes')
}

var r = t[t.length - 1]
if(r > t.length) {
if (r > t.length) {
throw new Error(`unpad given ${t.length} bytes, but pad is ${r}`)
}

return new Uint8Array(t.buffer, t.byteOffset, t.length - r)
}
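`writeRandomPadMax16` appends 1 to 16 padding bytes (the final byte recording the pad length) and `unpadRandomMax16` strips them again, so the two invert each other:

```ts
import { unpadRandomMax16, writeRandomPadMax16 } from './generics'

const payload = new TextEncoder().encode('hello')
const padded = writeRandomPadMax16(payload) // 1..16 extra bytes appended
const unpadded = unpadRandomMax16(padded)
Buffer.from(unpadded).toString() // 'hello'
```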
export const encodeWAMessage = (message: proto.IMessage) => (
writeRandomPadMax16(
proto.Message.encode(message).finish()
)
)
export const encodeWAMessage = (message: proto.IMessage) => writeRandomPadMax16(proto.Message.encode(message).finish())

export const generateRegistrationId = (): number => {
return Uint16Array.from(randomBytes(2))[0] & 16383
@@ -96,7 +96,7 @@ export const generateRegistrationId = (): number => {
export const encodeBigEndian = (e: number, t = 4) => {
let r = e
const a = new Uint8Array(t)
for(let i = t - 1; i >= 0; i--) {
for (let i = t - 1; i >= 0; i--) {
a[i] = 255 & r
r >>>= 8
}
@@ -104,7 +104,8 @@ export const encodeBigEndian = (e: number, t = 4) => {
return a
}

export const toNumber = (t: Long | number | null | undefined): number => ((typeof t === 'object' && t) ? ('toNumber' in t ? t.toNumber() : (t as Long).low) : t || 0)
export const toNumber = (t: Long | number | null | undefined): number =>
typeof t === 'object' && t ? ('toNumber' in t ? t.toNumber() : (t as Long).low) : t || 0
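`encodeBigEndian` serializes an unsigned integer into `t` big-endian bytes, and `toNumber` collapses protobuf `Long` values (or null/undefined) to plain numbers:

```ts
import { encodeBigEndian, toNumber } from './generics'

encodeBigEndian(0x01020304) // Uint8Array [1, 2, 3, 4]
encodeBigEndian(5, 2)       // Uint8Array [0, 5]

toNumber(7)         // 7
toNumber(undefined) // 0
```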
/** unix timestamp of a date in seconds */
export const unixTimestampSeconds = (date: Date = new Date()) => Math.floor(date.getTime() / 1000)
@@ -124,12 +125,12 @@ export const debouncedTimeout = (intervalMs = 1000, task?: () => void) => {
timeout && clearTimeout(timeout)
timeout = undefined
},
setTask: (newTask: () => void) => task = newTask,
setInterval: (newInterval: number) => intervalMs = newInterval
setTask: (newTask: () => void) => (task = newTask),
setInterval: (newInterval: number) => (intervalMs = newInterval)
}
}

export const delay = (ms: number) => delayCancellable (ms).delay
export const delay = (ms: number) => delayCancellable(ms).delay

export const delayCancellable = (ms: number) => {
const stack = new Error().stack
@@ -140,7 +141,7 @@ export const delayCancellable = (ms: number) => {
reject = _reject
})
const cancel = () => {
clearTimeout (timeout)
clearTimeout(timeout)
reject(
new Boom('Cancelled', {
statusCode: 500,
@@ -154,29 +155,33 @@ export const delayCancellable = (ms: number) => {
return { delay, cancel }
}

export async function promiseTimeout<T>(ms: number | undefined, promise: (resolve: (v: T) => void, reject: (error) => void) => void) {
if(!ms) {
export async function promiseTimeout<T>(
ms: number | undefined,
promise: (resolve: (v: T) => void, reject: (error) => void) => void
) {
if (!ms) {
return new Promise(promise)
}

const stack = new Error().stack
// Create a promise that rejects in <ms> milliseconds
const { delay, cancel } = delayCancellable (ms)
const { delay, cancel } = delayCancellable(ms)
const p = new Promise((resolve, reject) => {
delay
.then(() => reject(
new Boom('Timed Out', {
statusCode: DisconnectReason.timedOut,
data: {
stack
}
})
))
.catch (err => reject(err))
.then(() =>
reject(
new Boom('Timed Out', {
statusCode: DisconnectReason.timedOut,
data: {
stack
}
})
)
)
.catch(err => reject(err))

promise (resolve, reject)
})
.finally (cancel)
promise(resolve, reject)
}).finally(cancel)
return p as Promise<T>
}
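`promiseTimeout` behaves like the `Promise` constructor with an optional deadline: with `ms` unset it is a plain promise, otherwise it rejects with a Boom 'Timed Out' carrying `DisconnectReason.timedOut`. A sketch:

```ts
import { promiseTimeout } from './generics'

declare function work(): Promise<string> // any async operation

// resolves if work() settles within 5s, otherwise rejects with the timeout Boom
const result = await promiseTimeout<string>(5_000, (resolve, reject) => {
	work().then(resolve, reject)
})
```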
@@ -186,9 +191,9 @@ export const generateMessageIDV2 = (userId?: string): string => {
const data = Buffer.alloc(8 + 20 + 16)
data.writeBigUInt64BE(BigInt(Math.floor(Date.now() / 1000)))

if(userId) {
if (userId) {
const id = jidDecode(userId)
if(id?.user) {
if (id?.user) {
data.write(id.user, 8)
data.write('@c.us', 8 + id.user.length)
}
@@ -205,37 +210,30 @@ export const generateMessageIDV2 = (userId?: string): string => {
export const generateMessageID = () => '3EB0' + randomBytes(18).toString('hex').toUpperCase()

export function bindWaitForEvent<T extends keyof BaileysEventMap>(ev: BaileysEventEmitter, event: T) {
return async(check: (u: BaileysEventMap[T]) => Promise<boolean | undefined>, timeoutMs?: number) => {
return async (check: (u: BaileysEventMap[T]) => Promise<boolean | undefined>, timeoutMs?: number) => {
let listener: (item: BaileysEventMap[T]) => void
let closeListener: (state: Partial<ConnectionState>) => void
await (
promiseTimeout<void>(
timeoutMs,
(resolve, reject) => {
closeListener = ({ connection, lastDisconnect }) => {
if(connection === 'close') {
reject(
lastDisconnect?.error
|| new Boom('Connection Closed', { statusCode: DisconnectReason.connectionClosed })
)
}
}

ev.on('connection.update', closeListener)
listener = async(update) => {
if(await check(update)) {
resolve()
}
}

ev.on(event, listener)
await promiseTimeout<void>(timeoutMs, (resolve, reject) => {
closeListener = ({ connection, lastDisconnect }) => {
if (connection === 'close') {
reject(
lastDisconnect?.error || new Boom('Connection Closed', { statusCode: DisconnectReason.connectionClosed })
)
}
)
.finally(() => {
ev.off(event, listener)
ev.off('connection.update', closeListener)
})
)
}

ev.on('connection.update', closeListener)
listener = async update => {
if (await check(update)) {
resolve()
}
}

ev.on(event, listener)
}).finally(() => {
ev.off(event, listener)
ev.off('connection.update', closeListener)
})
}
}
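`bindWaitForEvent` returns a waiter that resolves once `check` passes and rejects if the connection closes first (or the timeout fires). Waiting for the socket to open, for example:

```ts
const waitForConnectionUpdate = bindWaitForEvent(ev, 'connection.update')

// resolves on 'open'; rejects on close or after 20s
await waitForConnectionUpdate(async ({ connection }) => connection === 'open', 20_000)
```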
@@ -245,21 +243,18 @@ export const bindWaitForConnectionUpdate = (ev: BaileysEventEmitter) => bindWait
* utility that fetches latest baileys version from the master branch.
* Use to ensure your WA connection is always on the latest version
*/
export const fetchLatestBaileysVersion = async(options: AxiosRequestConfig<{}> = { }) => {
export const fetchLatestBaileysVersion = async (options: AxiosRequestConfig<{}> = {}) => {
const URL = 'https://raw.githubusercontent.com/WhiskeySockets/Baileys/master/src/Defaults/baileys-version.json'
try {
const result = await axios.get<{ version: WAVersion }>(
URL,
{
...options,
responseType: 'json'
}
)
const result = await axios.get<{ version: WAVersion }>(URL, {
...options,
responseType: 'json'
})
return {
version: result.data.version,
isLatest: true
}
} catch(error) {
} catch (error) {
return {
version: baileysVersion as WAVersion,
isLatest: false,
@@ -272,20 +267,17 @@ export const fetchLatestBaileysVersion = async(options: AxiosRequestConfig<{}> =
* A utility that fetches the latest web version of whatsapp.
* Use to ensure your WA connection is always on the latest version
*/
export const fetchLatestWaWebVersion = async(options: AxiosRequestConfig<{}>) => {
export const fetchLatestWaWebVersion = async (options: AxiosRequestConfig<{}>) => {
try {
const { data } = await axios.get(
'https://web.whatsapp.com/sw.js',
{
...options,
responseType: 'json'
}
)
const { data } = await axios.get('https://web.whatsapp.com/sw.js', {
...options,
responseType: 'json'
})

const regex = /\\?"client_revision\\?":\s*(\d+)/
const match = data.match(regex)

if(!match?.[1]) {
if (!match?.[1]) {
return {
version: baileysVersion as WAVersion,
isLatest: false,
@@ -301,7 +293,7 @@ export const fetchLatestWaWebVersion = async(options: AxiosRequestConfig<{}>) =>
version: [2, 3000, +clientRevision] as WAVersion,
isLatest: true
}
} catch(error) {
} catch (error) {
return {
version: baileysVersion as WAVersion,
isLatest: false,
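Both fetchers fail soft: on any network error they return the bundled `baileysVersion` with `isLatest: false` plus the error, so callers can proceed offline. Typical use before opening a socket (`makeWASocket` is assumed from the wider package API):

```ts
const { version, isLatest } = await fetchLatestBaileysVersion()
console.log(`using WA v${version.join('.')}, isLatest: ${isLatest}`)
const sock = makeWASocket({ version })
```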
@@ -317,9 +309,9 @@ export const generateMdTagPrefix = () => {
}

const STATUS_MAP: { [_: string]: proto.WebMessageInfo.Status } = {
'sender': proto.WebMessageInfo.Status.SERVER_ACK,
'played': proto.WebMessageInfo.Status.PLAYED,
'read': proto.WebMessageInfo.Status.READ,
sender: proto.WebMessageInfo.Status.SERVER_ACK,
played: proto.WebMessageInfo.Status.PLAYED,
read: proto.WebMessageInfo.Status.READ,
'read-self': proto.WebMessageInfo.Status.READ
}
/**
@@ -328,7 +320,7 @@ const STATUS_MAP: { [_: string]: proto.WebMessageInfo.Status } = {
*/
export const getStatusFromReceiptType = (type: string | undefined) => {
const status = STATUS_MAP[type!]
if(typeof type === 'undefined') {
if (typeof type === 'undefined') {
return proto.WebMessageInfo.Status.DELIVERY_ACK
}
@@ -348,7 +340,7 @@ export const getErrorCodeFromStreamError = (node: BinaryNode) => {
|
||||
let reason = reasonNode?.tag || 'unknown'
|
||||
const statusCode = +(node.attrs.code || CODE_MAP[reason] || DisconnectReason.badSession)
|
||||
|
||||
if(statusCode === DisconnectReason.restartRequired) {
|
||||
if (statusCode === DisconnectReason.restartRequired) {
|
||||
reason = 'restart required'
|
||||
}
|
||||
|
||||
@@ -361,28 +353,28 @@ export const getErrorCodeFromStreamError = (node: BinaryNode) => {
|
||||
export const getCallStatusFromNode = ({ tag, attrs }: BinaryNode) => {
|
||||
let status: WACallUpdateType
|
||||
switch (tag) {
|
||||
case 'offer':
|
||||
case 'offer_notice':
|
||||
status = 'offer'
|
||||
break
|
||||
case 'terminate':
|
||||
if(attrs.reason === 'timeout') {
|
||||
status = 'timeout'
|
||||
} else {
|
||||
//fired when accepted/rejected/timeout/caller hangs up
|
||||
status = 'terminate'
|
||||
}
|
||||
case 'offer':
|
||||
case 'offer_notice':
|
||||
status = 'offer'
|
||||
break
|
||||
case 'terminate':
|
||||
if (attrs.reason === 'timeout') {
|
||||
status = 'timeout'
|
||||
} else {
|
||||
//fired when accepted/rejected/timeout/caller hangs up
|
||||
status = 'terminate'
|
||||
}
|
||||
|
||||
break
|
||||
case 'reject':
|
||||
status = 'reject'
|
||||
break
|
||||
case 'accept':
|
||||
status = 'accept'
|
||||
break
|
||||
default:
|
||||
status = 'ringing'
|
||||
break
|
||||
break
|
||||
case 'reject':
|
||||
status = 'reject'
|
||||
break
|
||||
case 'accept':
|
||||
status = 'accept'
|
||||
break
|
||||
default:
|
||||
status = 'ringing'
|
||||
break
|
||||
}
|
||||
|
||||
return status
|
||||
@@ -392,16 +384,17 @@ const UNEXPECTED_SERVER_CODE_TEXT = 'Unexpected server response: '
|
||||
|
||||
export const getCodeFromWSError = (error: Error) => {
|
||||
let statusCode = 500
|
||||
if(error?.message?.includes(UNEXPECTED_SERVER_CODE_TEXT)) {
|
||||
if (error?.message?.includes(UNEXPECTED_SERVER_CODE_TEXT)) {
|
||||
const code = +error?.message.slice(UNEXPECTED_SERVER_CODE_TEXT.length)
|
||||
if(!Number.isNaN(code) && code >= 400) {
|
||||
if (!Number.isNaN(code) && code >= 400) {
|
||||
statusCode = code
|
||||
}
|
||||
} else if(
|
||||
} else if (
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
(error as any)?.code?.startsWith('E')
|
||||
|| error?.message?.includes('timed out')
|
||||
) { // handle ETIMEOUT, ENOTFOUND etc
|
||||
(error as any)?.code?.startsWith('E') ||
|
||||
error?.message?.includes('timed out')
|
||||
) {
|
||||
// handle ETIMEOUT, ENOTFOUND etc
|
||||
statusCode = 408
|
||||
}
|
||||
|
||||
@@ -417,9 +410,9 @@ export const isWABusinessPlatform = (platform: string) => {
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
export function trimUndefined(obj: {[_: string]: any}) {
|
||||
for(const key in obj) {
|
||||
if(typeof obj[key] === 'undefined') {
|
||||
export function trimUndefined(obj: { [_: string]: any }) {
|
||||
for (const key in obj) {
|
||||
if (typeof obj[key] === 'undefined') {
|
||||
delete obj[key]
|
||||
}
|
||||
}
|
||||
@@ -434,17 +427,17 @@ export function bytesToCrockford(buffer: Buffer): string {
|
||||
let bitCount = 0
|
||||
const crockford: string[] = []
|
||||
|
||||
for(const element of buffer) {
|
||||
for (const element of buffer) {
|
||||
value = (value << 8) | (element & 0xff)
|
||||
bitCount += 8
|
||||
|
||||
while(bitCount >= 5) {
|
||||
while (bitCount >= 5) {
|
||||
crockford.push(CROCKFORD_CHARACTERS.charAt((value >>> (bitCount - 5)) & 31))
|
||||
bitCount -= 5
|
||||
}
|
||||
}
|
||||
|
||||
if(bitCount > 0) {
|
||||
if (bitCount > 0) {
|
||||
crockford.push(CROCKFORD_CHARACTERS.charAt((value << (5 - bitCount)) & 31))
|
||||
}
|
||||
|
||||
|
||||
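// Editor's note: a minimal usage sketch (not part of the diff above) for the
// version helpers. Assumes the package's public exports; on fetch failure the
// helpers fall back to the bundled version with `isLatest: false`.
import makeWASocket, { fetchLatestBaileysVersion } from '@whiskeysockets/baileys'

const startSock = async () => {
	const { version, isLatest } = await fetchLatestBaileysVersion()
	console.log(`using WA v${version.join('.')}, isLatest: ${isLatest}`)
	return makeWASocket({ version })
}
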
@@ -10,10 +10,7 @@ import { downloadContentFromMessage } from './messages-media'

const inflatePromise = promisify(inflate)

export const downloadHistory = async(
	msg: proto.Message.IHistorySyncNotification,
	options: AxiosRequestConfig<{}>
) => {
export const downloadHistory = async (msg: proto.Message.IHistorySyncNotification, options: AxiosRequestConfig<{}>) => {
	const stream = await downloadContentFromMessage(msg, 'md-msg-hist', { options })
	const bufferArray: Buffer[] = []
	for await (const chunk of stream) {
@@ -35,59 +32,58 @@ export const processHistoryMessage = (item: proto.IHistorySync) => {
	const chats: Chat[] = []

	switch (item.syncType) {
		case proto.HistorySync.HistorySyncType.INITIAL_BOOTSTRAP:
		case proto.HistorySync.HistorySyncType.RECENT:
		case proto.HistorySync.HistorySyncType.FULL:
		case proto.HistorySync.HistorySyncType.ON_DEMAND:
			for(const chat of item.conversations! as Chat[]) {
				contacts.push({ id: chat.id, name: chat.name || undefined })
		case proto.HistorySync.HistorySyncType.INITIAL_BOOTSTRAP:
		case proto.HistorySync.HistorySyncType.RECENT:
		case proto.HistorySync.HistorySyncType.FULL:
		case proto.HistorySync.HistorySyncType.ON_DEMAND:
			for (const chat of item.conversations! as Chat[]) {
				contacts.push({ id: chat.id, name: chat.name || undefined })

				const msgs = chat.messages || []
				delete chat.messages
				delete chat.archived
				delete chat.muteEndTime
				delete chat.pinned
				const msgs = chat.messages || []
				delete chat.messages
				delete chat.archived
				delete chat.muteEndTime
				delete chat.pinned

				for(const item of msgs) {
					const message = item.message!
					messages.push(message)
				for (const item of msgs) {
					const message = item.message!
					messages.push(message)

					if(!chat.messages?.length) {
						// keep only the most recent message in the chat array
						chat.messages = [{ message }]
					if (!chat.messages?.length) {
						// keep only the most recent message in the chat array
						chat.messages = [{ message }]
					}

					if (!message.key.fromMe && !chat.lastMessageRecvTimestamp) {
						chat.lastMessageRecvTimestamp = toNumber(message.messageTimestamp)
					}

					if (
						(message.messageStubType === WAMessageStubType.BIZ_PRIVACY_MODE_TO_BSP ||
							message.messageStubType === WAMessageStubType.BIZ_PRIVACY_MODE_TO_FB) &&
						message.messageStubParameters?.[0]
					) {
						contacts.push({
							id: message.key.participant || message.key.remoteJid!,
							verifiedName: message.messageStubParameters?.[0]
						})
					}
				}

					if(!message.key.fromMe && !chat.lastMessageRecvTimestamp) {
						chat.lastMessageRecvTimestamp = toNumber(message.messageTimestamp)
				if (isJidUser(chat.id) && chat.readOnly && chat.archived) {
					delete chat.readOnly
				}

					if(
						(message.messageStubType === WAMessageStubType.BIZ_PRIVACY_MODE_TO_BSP
						|| message.messageStubType === WAMessageStubType.BIZ_PRIVACY_MODE_TO_FB
						)
						&& message.messageStubParameters?.[0]
					) {
						contacts.push({
							id: message.key.participant || message.key.remoteJid!,
							verifiedName: message.messageStubParameters?.[0],
						})
					}
				chats.push({ ...chat })
			}

				if(isJidUser(chat.id) && chat.readOnly && chat.archived) {
					delete chat.readOnly
			break
		case proto.HistorySync.HistorySyncType.PUSH_NAME:
			for (const c of item.pushnames!) {
				contacts.push({ id: c.id!, notify: c.pushname! })
			}

				chats.push({ ...chat })
			}

			break
		case proto.HistorySync.HistorySyncType.PUSH_NAME:
			for(const c of item.pushnames!) {
				contacts.push({ id: c.id!, notify: c.pushname! })
			}

			break
			break
	}

	return {
@@ -99,7 +95,7 @@ export const processHistoryMessage = (item: proto.IHistorySync) => {
	}
}

export const downloadAndProcessHistorySyncNotification = async(
export const downloadAndProcessHistorySyncNotification = async (
	msg: proto.Message.IHistorySyncNotification,
	options: AxiosRequestConfig<{}>
) => {
@@ -112,4 +108,4 @@ export const getHistoryMsg = (message: proto.IMessage) => {
	const anyHistoryMsg = normalizedContent?.protocolMessage?.historySyncNotification

	return anyHistoryMsg
}
}

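// Editor's note: a hedged sketch of consuming the history-sync helpers above.
// The event name and payload follow Baileys' event map; `sock` is assumed to be
// a connected socket instance and is not part of this diff.
sock.ev.on('messaging-history.set', ({ chats, contacts, messages, isLatest }) => {
	console.log(`history sync: ${chats.length} chats, ${contacts.length} contacts, ${messages.length} messages (latest: ${isLatest})`)
})
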
@@ -7,10 +7,7 @@ import { extractImageThumb, getHttpStream } from './messages-media'
const THUMBNAIL_WIDTH_PX = 192

/** Fetches an image and generates a thumbnail for it */
const getCompressedJpegThumbnail = async(
	url: string,
	{ thumbnailWidth, fetchOpts }: URLGenerationOptions
) => {
const getCompressedJpegThumbnail = async (url: string, { thumbnailWidth, fetchOpts }: URLGenerationOptions) => {
	const stream = await getHttpStream(url, fetchOpts)
	const result = await extractImageThumb(stream, thumbnailWidth)
	return result
@@ -34,12 +31,12 @@ export type URLGenerationOptions = {
 * @param text first matched URL in text
 * @returns the URL info required to generate link preview
 */
export const getUrlInfo = async(
export const getUrlInfo = async (
	text: string,
	opts: URLGenerationOptions = {
		thumbnailWidth: THUMBNAIL_WIDTH_PX,
		fetchOpts: { timeout: 3000 }
	},
	}
): Promise<WAUrlInfo | undefined> => {
	try {
		// retries
@@ -48,7 +45,7 @@ export const getUrlInfo = async(

		const { getLinkPreview } = await import('link-preview-js')
		let previewLink = text
		if(!text.startsWith('https://') && !text.startsWith('http://')) {
		if (!text.startsWith('https://') && !text.startsWith('http://')) {
			previewLink = 'https://' + previewLink
		}

@@ -58,14 +55,14 @@ export const getUrlInfo = async(
			handleRedirects: (baseURL: string, forwardedURL: string) => {
				const urlObj = new URL(baseURL)
				const forwardedURLObj = new URL(forwardedURL)
				if(retries >= maxRetry) {
				if (retries >= maxRetry) {
					return false
				}

				if(
					forwardedURLObj.hostname === urlObj.hostname
					|| forwardedURLObj.hostname === 'www.' + urlObj.hostname
					|| 'www.' + forwardedURLObj.hostname === urlObj.hostname
				if (
					forwardedURLObj.hostname === urlObj.hostname ||
					forwardedURLObj.hostname === 'www.' + urlObj.hostname ||
					'www.' + forwardedURLObj.hostname === urlObj.hostname
				) {
					retries + 1
					return true
@@ -75,7 +72,7 @@ export const getUrlInfo = async(
			},
			headers: opts.fetchOpts as {}
		})
		if(info && 'title' in info && info.title) {
		if (info && 'title' in info && info.title) {
			const [image] = info.images

			const urlInfo: WAUrlInfo = {
@@ -86,7 +83,7 @@ export const getUrlInfo = async(
				originalThumbnailUrl: image
			}

			if(opts.uploadImage) {
			if (opts.uploadImage) {
				const { imageMessage } = await prepareWAMessageMedia(
					{ image: { url: image } },
					{
@@ -95,28 +92,21 @@ export const getUrlInfo = async(
						options: opts.fetchOpts
					}
				)
				urlInfo.jpegThumbnail = imageMessage?.jpegThumbnail
					? Buffer.from(imageMessage.jpegThumbnail)
					: undefined
				urlInfo.jpegThumbnail = imageMessage?.jpegThumbnail ? Buffer.from(imageMessage.jpegThumbnail) : undefined
				urlInfo.highQualityThumbnail = imageMessage || undefined
			} else {
				try {
					urlInfo.jpegThumbnail = image
						? (await getCompressedJpegThumbnail(image, opts)).buffer
						: undefined
				} catch(error) {
					opts.logger?.debug(
						{ err: error.stack, url: previewLink },
						'error in generating thumbnail'
					)
					urlInfo.jpegThumbnail = image ? (await getCompressedJpegThumbnail(image, opts)).buffer : undefined
				} catch (error) {
					opts.logger?.debug({ err: error.stack, url: previewLink }, 'error in generating thumbnail')
				}
			}

			return urlInfo
		}
	} catch(error) {
		if(!error.message.includes('receive a valid')) {
	} catch (error) {
		if (!error.message.includes('receive a valid')) {
			throw error
		}
	}
}
}

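// Editor's note: illustrative call of getUrlInfo above, passing the same values
// the function declares as defaults (192px thumbnail, 3s fetch timeout). Sketch only.
const previewExample = async () => {
	const info = await getUrlInfo('check out https://github.com/WhiskeySockets/Baileys', {
		thumbnailWidth: 192,
		fetchOpts: { timeout: 3000 }
	})
	console.log(info?.title, info?.description)
}
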
@@ -1,13 +1,13 @@
import P from 'pino'

export interface ILogger {
	level: string
	child(obj: Record<string, unknown>): ILogger
	trace(obj: unknown, msg?: string)
	debug(obj: unknown, msg?: string)
	info(obj: unknown, msg?: string)
	warn(obj: unknown, msg?: string)
	error(obj: unknown, msg?: string)
	level: string
	child(obj: Record<string, unknown>): ILogger
	trace(obj: unknown, msg?: string)
	debug(obj: unknown, msg?: string)
	info(obj: unknown, msg?: string)
	warn(obj: unknown, msg?: string)
	error(obj: unknown, msg?: string)
}

export default P({ timestamp: () => `,"time":"${new Date().toJSON()}"` })

@@ -9,7 +9,6 @@ import { hkdf } from './crypto'
const o = 128

class d {

	salt: string

	constructor(e: string) {
@@ -17,7 +16,7 @@ class d {
	}
	add(e, t) {
		var r = this
		for(const item of t) {
		for (const item of t) {
			e = r._addSingle(e, item)
		}

@@ -25,7 +24,7 @@ class d {
	}
	subtract(e, t) {
		var r = this
		for(const item of t) {
		for (const item of t) {
			e = r._subtractSingle(e, item)
		}

@@ -38,20 +37,20 @@ class d {
	async _addSingle(e, t) {
		var r = this
		const n = new Uint8Array(await hkdf(Buffer.from(t), o, { info: r.salt })).buffer
		return r.performPointwiseWithOverflow(await e, n, ((e, t) => e + t))
		return r.performPointwiseWithOverflow(await e, n, (e, t) => e + t)
	}
	async _subtractSingle(e, t) {
		var r = this

		const n = new Uint8Array(await hkdf(Buffer.from(t), o, { info: r.salt })).buffer
		return r.performPointwiseWithOverflow(await e, n, ((e, t) => e - t))
		return r.performPointwiseWithOverflow(await e, n, (e, t) => e - t)
	}
	performPointwiseWithOverflow(e, t, r) {
		const n = new DataView(e)
			, i = new DataView(t)
			, a = new ArrayBuffer(n.byteLength)
			, s = new DataView(a)
		for(let e = 0; e < n.byteLength; e += 2) {
		const n = new DataView(e),
			i = new DataView(t),
			a = new ArrayBuffer(n.byteLength),
			s = new DataView(a)
		for (let e = 0; e < n.byteLength; e += 2) {
			s.setUint16(e, r(n.getUint16(e, !0), i.getUint16(e, !0)), !0)
		}


|
||||
|
||||
return {
|
||||
mutex<T>(code: () => Promise<T> | T): Promise<T> {
|
||||
task = (async() => {
|
||||
task = (async () => {
|
||||
// wait for the previous task to complete
|
||||
// if there is an error, we swallow so as to not block the queue
|
||||
try {
|
||||
await task
|
||||
} catch{ }
|
||||
} catch {}
|
||||
|
||||
try {
|
||||
// execute the current task
|
||||
@@ -24,7 +24,7 @@ export const makeMutex = () => {
|
||||
// we replace the existing task, appending the new piece of execution to it
|
||||
// so the next task will have to wait for this one to finish
|
||||
return task
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,11 +35,11 @@ export const makeKeyedMutex = () => {
|
||||
|
||||
return {
|
||||
mutex<T>(key: string, task: () => Promise<T> | T): Promise<T> {
|
||||
if(!map[key]) {
|
||||
if (!map[key]) {
|
||||
map[key] = makeMutex()
|
||||
}
|
||||
|
||||
return map[key].mutex(task)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
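// Editor's note: how the mutex above serialises work -- each call chains onto the
// previous `task` promise, and errors are swallowed so the queue never stalls.
const m = makeMutex()
void m.mutex(async () => {
	// runs first
})
void m.mutex(async () => {
	// runs only after the first body settles, even if it threw
})
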
@@ -11,7 +11,20 @@ import { Readable, Transform } from 'stream'
import { URL } from 'url'
import { proto } from '../../WAProto'
import { DEFAULT_ORIGIN, MEDIA_HKDF_KEY_MAPPING, MEDIA_PATH_MAP } from '../Defaults'
import { BaileysEventMap, DownloadableMessage, MediaConnInfo, MediaDecryptionKeyInfo, MediaType, MessageType, SocketConfig, WAGenericMediaMessage, WAMediaPayloadURL, WAMediaUpload, WAMediaUploadFunction, WAMessageContent } from '../Types'
import {
	BaileysEventMap,
	DownloadableMessage,
	MediaConnInfo,
	MediaDecryptionKeyInfo,
	MediaType,
	MessageType,
	SocketConfig,
	WAGenericMediaMessage,
	WAMediaPayloadURL,
	WAMediaUpload,
	WAMediaUploadFunction,
	WAMessageContent
} from '../Types'
import { BinaryNode, getBinaryNodeChild, getBinaryNodeChildBuffer, jidNormalizedUser } from '../WABinary'
import { aesDecryptGCM, aesEncryptGCM, hkdf } from './crypto'
import { generateMessageIDV2 } from './generics'
@@ -19,30 +32,24 @@ import { ILogger } from './logger'

const getTmpFilesDirectory = () => tmpdir()

const getImageProcessingLibrary = async() => {
const getImageProcessingLibrary = async () => {
	const [_jimp, sharp] = await Promise.all([
		(async() => {
			const jimp = await (
				import('jimp')
					.catch(() => { })
			)
		(async () => {
			const jimp = await import('jimp').catch(() => {})
			return jimp
		})(),
		(async() => {
			const sharp = await (
				import('sharp')
					.catch(() => { })
			)
		(async () => {
			const sharp = await import('sharp').catch(() => {})
			return sharp
		})()
	])

	if(sharp) {
	if (sharp) {
		return { sharp }
	}

	const jimp = _jimp?.default || _jimp
	if(jimp) {
	if (jimp) {
		return { jimp }
	}

@@ -55,12 +62,15 @@ export const hkdfInfoKey = (type: MediaType) => {
}

/** generates all the keys required to encrypt/decrypt & sign a media message */
export async function getMediaKeys(buffer: Uint8Array | string | null | undefined, mediaType: MediaType): Promise<MediaDecryptionKeyInfo> {
	if(!buffer) {
export async function getMediaKeys(
	buffer: Uint8Array | string | null | undefined,
	mediaType: MediaType
): Promise<MediaDecryptionKeyInfo> {
	if (!buffer) {
		throw new Boom('Cannot derive from empty media key')
	}

	if(typeof buffer === 'string') {
	if (typeof buffer === 'string') {
		buffer = Buffer.from(buffer.replace('data:;base64,', ''), 'base64')
	}

@@ -69,49 +79,47 @@ export async function getMediaKeys(buffer: Uint8Array | string | null | undefine
	return {
		iv: expandedMediaKey.slice(0, 16),
		cipherKey: expandedMediaKey.slice(16, 48),
		macKey: expandedMediaKey.slice(48, 80),
		macKey: expandedMediaKey.slice(48, 80)
	}
}

/** Extracts video thumb using FFMPEG */
const extractVideoThumb = async(
const extractVideoThumb = async (
	path: string,
	destPath: string,
	time: string,
	size: { width: number, height: number },
) => new Promise<void>((resolve, reject) => {
	const cmd = `ffmpeg -ss ${time} -i ${path} -y -vf scale=${size.width}:-1 -vframes 1 -f image2 ${destPath}`
	exec(cmd, (err) => {
		if(err) {
			reject(err)
		} else {
			resolve()
		}
	})
})
	size: { width: number; height: number }
) =>
	new Promise<void>((resolve, reject) => {
		const cmd = `ffmpeg -ss ${time} -i ${path} -y -vf scale=${size.width}:-1 -vframes 1 -f image2 ${destPath}`
		exec(cmd, err => {
			if (err) {
				reject(err)
			} else {
				resolve()
			}
		})
	})

export const extractImageThumb = async(bufferOrFilePath: Readable | Buffer | string, width = 32) => {
	if(bufferOrFilePath instanceof Readable) {
export const extractImageThumb = async (bufferOrFilePath: Readable | Buffer | string, width = 32) => {
	if (bufferOrFilePath instanceof Readable) {
		bufferOrFilePath = await toBuffer(bufferOrFilePath)
	}

	const lib = await getImageProcessingLibrary()
	if('sharp' in lib && typeof lib.sharp?.default === 'function') {
	if ('sharp' in lib && typeof lib.sharp?.default === 'function') {
		const img = lib.sharp.default(bufferOrFilePath)
		const dimensions = await img.metadata()

		const buffer = await img
			.resize(width)
			.jpeg({ quality: 50 })
			.toBuffer()
		const buffer = await img.resize(width).jpeg({ quality: 50 }).toBuffer()
		return {
			buffer,
			original: {
				width: dimensions.width,
				height: dimensions.height,
			},
				height: dimensions.height
			}
		}
	} else if('jimp' in lib && typeof lib.jimp?.read === 'function') {
	} else if ('jimp' in lib && typeof lib.jimp?.read === 'function') {
		const { read, MIME_JPEG, RESIZE_BILINEAR, AUTO } = lib.jimp

		const jimp = await read(bufferOrFilePath as string)
@@ -119,10 +127,7 @@ export const extractImageThumb = async(bufferOrFilePath: Readable | Buffer | str
			width: jimp.getWidth(),
			height: jimp.getHeight()
		}
		const buffer = await jimp
			.quality(50)
			.resize(width, AUTO, RESIZE_BILINEAR)
			.getBufferAsync(MIME_JPEG)
		const buffer = await jimp.quality(50).resize(width, AUTO, RESIZE_BILINEAR).getBufferAsync(MIME_JPEG)
		return {
			buffer,
			original: dimensions
@@ -132,20 +137,14 @@ export const extractImageThumb = async(bufferOrFilePath: Readable | Buffer | str
	}
}

export const encodeBase64EncodedStringForUpload = (b64: string) => (
	encodeURIComponent(
		b64
			.replace(/\+/g, '-')
			.replace(/\//g, '_')
			.replace(/\=+$/, '')
	)
)
export const encodeBase64EncodedStringForUpload = (b64: string) =>
	encodeURIComponent(b64.replace(/\+/g, '-').replace(/\//g, '_').replace(/\=+$/, ''))

export const generateProfilePicture = async(mediaUpload: WAMediaUpload) => {
export const generateProfilePicture = async (mediaUpload: WAMediaUpload) => {
	let bufferOrFilePath: Buffer | string
	if(Buffer.isBuffer(mediaUpload)) {
	if (Buffer.isBuffer(mediaUpload)) {
		bufferOrFilePath = mediaUpload
	} else if('url' in mediaUpload) {
	} else if ('url' in mediaUpload) {
		bufferOrFilePath = mediaUpload.url.toString()
	} else {
		bufferOrFilePath = await toBuffer(mediaUpload.stream)
@@ -153,44 +152,42 @@ export const generateProfilePicture = async(mediaUpload: WAMediaUpload) => {

	const lib = await getImageProcessingLibrary()
	let img: Promise<Buffer>
	if('sharp' in lib && typeof lib.sharp?.default === 'function') {
		img = lib.sharp.default(bufferOrFilePath)
	if ('sharp' in lib && typeof lib.sharp?.default === 'function') {
		img = lib.sharp
			.default(bufferOrFilePath)
			.resize(640, 640)
			.jpeg({
				quality: 50,
				quality: 50
			})
			.toBuffer()
	} else if('jimp' in lib && typeof lib.jimp?.read === 'function') {
	} else if ('jimp' in lib && typeof lib.jimp?.read === 'function') {
		const { read, MIME_JPEG, RESIZE_BILINEAR } = lib.jimp
		const jimp = await read(bufferOrFilePath as string)
		const min = Math.min(jimp.getWidth(), jimp.getHeight())
		const cropped = jimp.crop(0, 0, min, min)

		img = cropped
			.quality(50)
			.resize(640, 640, RESIZE_BILINEAR)
			.getBufferAsync(MIME_JPEG)
		img = cropped.quality(50).resize(640, 640, RESIZE_BILINEAR).getBufferAsync(MIME_JPEG)
	} else {
		throw new Boom('No image processing library available')
	}

	return {
		img: await img,
		img: await img
	}
}

/** gets the SHA256 of the given media message */
export const mediaMessageSHA256B64 = (message: WAMessageContent) => {
	const media = Object.values(message)[0] as WAGenericMediaMessage
	return media?.fileSha256 && Buffer.from(media.fileSha256).toString ('base64')
	return media?.fileSha256 && Buffer.from(media.fileSha256).toString('base64')
}

export async function getAudioDuration(buffer: Buffer | string | Readable) {
	const musicMetadata = await import('music-metadata')
	let metadata: IAudioMetadata
	if(Buffer.isBuffer(buffer)) {
	if (Buffer.isBuffer(buffer)) {
		metadata = await musicMetadata.parseBuffer(buffer, undefined, { duration: true })
	} else if(typeof buffer === 'string') {
	} else if (typeof buffer === 'string') {
		const rStream = createReadStream(buffer)
		try {
			metadata = await musicMetadata.parseStream(rStream, undefined, { duration: true })
@@ -209,11 +206,11 @@ export async function getAudioDuration(buffer: Buffer | string | Readable) {
 */
export async function getAudioWaveform(buffer: Buffer | string | Readable, logger?: ILogger) {
	try {
		const { default: decoder } = await eval('import(\'audio-decode\')')
		const { default: decoder } = await eval("import('audio-decode')")
		let audioData: Buffer
		if(Buffer.isBuffer(buffer)) {
		if (Buffer.isBuffer(buffer)) {
			audioData = buffer
		} else if(typeof buffer === 'string') {
		} else if (typeof buffer === 'string') {
			const rStream = createReadStream(buffer)
			audioData = await toBuffer(rStream)
		} else {
@@ -226,10 +223,10 @@ export async function getAudioWaveform(buffer: Buffer | string | Readable, logge
		const samples = 64 // Number of samples we want to have in our final data set
		const blockSize = Math.floor(rawData.length / samples) // the number of samples in each subdivision
		const filteredData: number[] = []
		for(let i = 0; i < samples; i++) {
			const blockStart = blockSize * i // the location of the first sample in the block
			let sum = 0
			for(let j = 0; j < blockSize; j++) {
		for (let i = 0; i < samples; i++) {
			const blockStart = blockSize * i // the location of the first sample in the block
			let sum = 0
			for (let j = 0; j < blockSize; j++) {
				sum = sum + Math.abs(rawData[blockStart + j]) // find the sum of all the samples in the block
			}

@@ -238,20 +235,17 @@ export async function getAudioWaveform(buffer: Buffer | string | Readable, logge

		// This guarantees that the largest data point will be set to 1, and the rest of the data will scale proportionally.
		const multiplier = Math.pow(Math.max(...filteredData), -1)
		const normalizedData = filteredData.map((n) => n * multiplier)
		const normalizedData = filteredData.map(n => n * multiplier)

		// Generate waveform like WhatsApp
		const waveform = new Uint8Array(
			normalizedData.map((n) => Math.floor(100 * n))
		)
		const waveform = new Uint8Array(normalizedData.map(n => Math.floor(100 * n)))

		return waveform
	} catch(e) {
	} catch (e) {
		logger?.debug('Failed to generate waveform: ' + e)
	}
}


export const toReadable = (buffer: Buffer) => {
	const readable = new Readable({ read: () => {} })
	readable.push(buffer)
@@ -259,7 +253,7 @@ export const toReadable = (buffer: Buffer) => {
	return readable
}

export const toBuffer = async(stream: Readable) => {
export const toBuffer = async (stream: Readable) => {
	const chunks: Buffer[] = []
	for await (const chunk of stream) {
		chunks.push(chunk)
@@ -269,16 +263,16 @@ export const toBuffer = async(stream: Readable) => {
	return Buffer.concat(chunks)
}

export const getStream = async(item: WAMediaUpload, opts?: AxiosRequestConfig) => {
	if(Buffer.isBuffer(item)) {
export const getStream = async (item: WAMediaUpload, opts?: AxiosRequestConfig) => {
	if (Buffer.isBuffer(item)) {
		return { stream: toReadable(item), type: 'buffer' } as const
	}

	if('stream' in item) {
	if ('stream' in item) {
		return { stream: item.stream, type: 'readable' } as const
	}

	if(item.url.toString().startsWith('http://') || item.url.toString().startsWith('https://')) {
	if (item.url.toString().startsWith('http://') || item.url.toString().startsWith('https://')) {
		return { stream: await getHttpStream(item.url, opts), type: 'remote' } as const
	}

@@ -290,21 +284,21 @@ export async function generateThumbnail(
	file: string,
	mediaType: 'video' | 'image',
	options: {
		logger?: ILogger
	}
		logger?: ILogger
	}
) {
	let thumbnail: string | undefined
	let originalImageDimensions: { width: number, height: number } | undefined
	if(mediaType === 'image') {
	let originalImageDimensions: { width: number; height: number } | undefined
	if (mediaType === 'image') {
		const { buffer, original } = await extractImageThumb(file)
		thumbnail = buffer.toString('base64')
		if(original.width && original.height) {
		if (original.width && original.height) {
			originalImageDimensions = {
				width: original.width,
				height: original.height,
				height: original.height
			}
		}
	} else if(mediaType === 'video') {
	} else if (mediaType === 'video') {
		const imgFilename = join(getTmpFilesDirectory(), generateMessageIDV2() + '.jpg')
		try {
			await extractVideoThumb(file, imgFilename, '00:00:00', { width: 32, height: 32 })
@@ -312,7 +306,7 @@ export async function generateThumbnail(
			thumbnail = buff.toString('base64')

			await fs.unlink(imgFilename)
		} catch(err) {
		} catch (err) {
			options.logger?.debug('could not generate video thumb: ' + err)
		}
	}
@@ -323,7 +317,7 @@ export async function generateThumbnail(
	}
}

export const getHttpStream = async(url: string | URL, options: AxiosRequestConfig & { isStream?: true } = {}) => {
export const getHttpStream = async (url: string | URL, options: AxiosRequestConfig & { isStream?: true } = {}) => {
	const fetched = await axios.get(url.toString(), { ...options, responseType: 'stream' })
	return fetched.data as Readable
}
@@ -334,7 +328,7 @@ type EncryptedStreamOptions = {
	opts?: AxiosRequestConfig
}

export const encryptedStream = async(
export const encryptedStream = async (
	media: WAMediaUpload,
	mediaType: MediaType,
	{ logger, saveOriginalFileIfRequired, opts }: EncryptedStreamOptions = {}
@@ -350,9 +344,9 @@ export const encryptedStream = async(
	let bodyPath: string | undefined
	let writeStream: WriteStream | undefined
	let didSaveToTmpPath = false
	if(type === 'file') {
	if (type === 'file') {
		bodyPath = (media as WAMediaPayloadURL).url.toString()
	} else if(saveOriginalFileIfRequired) {
	} else if (saveOriginalFileIfRequired) {
		bodyPath = join(getTmpFilesDirectory(), mediaType + generateMessageIDV2())
		writeStream = createWriteStream(bodyPath)
		didSaveToTmpPath = true
@@ -368,21 +362,14 @@ export const encryptedStream = async(
		for await (const data of stream) {
			fileLength += data.length

			if(
				type === 'remote'
				&& opts?.maxContentLength
				&& fileLength + data.length > opts.maxContentLength
			) {
				throw new Boom(
					`content length exceeded when encrypting "${type}"`,
					{
						data: { media, type }
					}
				)
			if (type === 'remote' && opts?.maxContentLength && fileLength + data.length > opts.maxContentLength) {
				throw new Boom(`content length exceeded when encrypting "${type}"`, {
					data: { media, type }
				})
			}

			sha256Plain = sha256Plain.update(data)
			if(writeStream && !writeStream.write(data)) {
			if (writeStream && !writeStream.write(data)) {
				await once(writeStream, 'drain')
			}

@@ -415,7 +402,7 @@ export const encryptedStream = async(
			fileLength,
			didSaveToTmpPath
		}
	} catch(error) {
	} catch (error) {
		// destroy all streams with error
		encWriteStream.destroy()
		writeStream?.destroy()
@@ -425,10 +412,10 @@ export const encryptedStream = async(
		sha256Enc.destroy()
		stream.destroy()

		if(didSaveToTmpPath) {
		if (didSaveToTmpPath) {
			try {
				await fs.unlink(bodyPath!)
			} catch(err) {
			} catch (err) {
				logger?.error({ err }, 'failed to save to tmp path')
			}
		}
@@ -451,17 +438,17 @@ const toSmallestChunkSize = (num: number) => {
}

export type MediaDownloadOptions = {
	startByte?: number
	endByte?: number
	startByte?: number
	endByte?: number
	options?: AxiosRequestConfig<{}>
}

export const getUrlFromDirectPath = (directPath: string) => `https://${DEF_HOST}${directPath}`

export const downloadContentFromMessage = async(
export const downloadContentFromMessage = async (
	{ mediaKey, directPath, url }: DownloadableMessage,
	type: MediaType,
	opts: MediaDownloadOptions = { }
	opts: MediaDownloadOptions = {}
) => {
	const downloadUrl = url || getUrlFromDirectPath(directPath!)
	const keys = await getMediaKeys(mediaKey, type)
@@ -473,18 +460,18 @@ export const downloadContentFromMessage = async(
 * Decrypts and downloads an AES256-CBC encrypted file given the keys.
 * Assumes the SHA256 of the plaintext is appended to the end of the ciphertext
 * */
export const downloadEncryptedContent = async(
export const downloadEncryptedContent = async (
	downloadUrl: string,
	{ cipherKey, iv }: MediaDecryptionKeyInfo,
	{ startByte, endByte, options }: MediaDownloadOptions = { }
	{ startByte, endByte, options }: MediaDownloadOptions = {}
) => {
	let bytesFetched = 0
	let startChunk = 0
	let firstBlockIsIV = false
	// if a start byte is specified -- then we need to fetch the previous chunk as that will form the IV
	if(startByte) {
	if (startByte) {
		const chunk = toSmallestChunkSize(startByte || 0)
		if(chunk) {
		if (chunk) {
			startChunk = chunk - AES_CHUNK_SIZE
			bytesFetched = chunk

@@ -495,33 +482,30 @@ export const downloadEncryptedContent = async(
	const endChunk = endByte ? toSmallestChunkSize(endByte || 0) + AES_CHUNK_SIZE : undefined

	const headers: AxiosRequestConfig['headers'] = {
		...options?.headers || { },
		Origin: DEFAULT_ORIGIN,
		...(options?.headers || {}),
		Origin: DEFAULT_ORIGIN
	}
	if(startChunk || endChunk) {
	if (startChunk || endChunk) {
		headers.Range = `bytes=${startChunk}-`
		if(endChunk) {
		if (endChunk) {
			headers.Range += endChunk
		}
	}

	// download the message
	const fetched = await getHttpStream(
		downloadUrl,
		{
			...options || { },
			headers,
			maxBodyLength: Infinity,
			maxContentLength: Infinity,
		}
	)
	const fetched = await getHttpStream(downloadUrl, {
		...(options || {}),
		headers,
		maxBodyLength: Infinity,
		maxContentLength: Infinity
	})

	let remainingBytes = Buffer.from([])

	let aes: Crypto.Decipher

	const pushBytes = (bytes: Buffer, push: (bytes: Buffer) => void) => {
		if(startByte || endByte) {
		if (startByte || endByte) {
			const start = bytesFetched >= startByte! ? undefined : Math.max(startByte! - bytesFetched, 0)
			const end = bytesFetched + bytes.length < endByte! ? undefined : Math.max(endByte! - bytesFetched, 0)

@@ -541,9 +525,9 @@ export const downloadEncryptedContent = async(
			remainingBytes = data.slice(decryptLength)
			data = data.slice(0, decryptLength)

			if(!aes) {
			if (!aes) {
				let ivValue = iv
				if(firstBlockIsIV) {
				if (firstBlockIsIV) {
					ivValue = data.slice(0, AES_CHUNK_SIZE)
					data = data.slice(AES_CHUNK_SIZE)
				}
@@ -551,16 +535,15 @@ export const downloadEncryptedContent = async(
				aes = Crypto.createDecipheriv('aes-256-cbc', cipherKey, ivValue)
				// if an end byte that is not EOF is specified
				// stop auto padding (PKCS7) -- otherwise throws an error for decryption
				if(endByte) {
				if (endByte) {
					aes.setAutoPadding(false)
				}

			}

			try {
				pushBytes(aes.update(data), b => this.push(b))
				callback()
			} catch(error) {
			} catch (error) {
				callback(error)
			}
		},
@@ -568,10 +551,10 @@ export const downloadEncryptedContent = async(
			try {
				pushBytes(aes.final(), b => this.push(b))
				callback()
			} catch(error) {
			} catch (error) {
				callback(error)
			}
		},
		}
	})
	return fetched.pipe(output, { end: true })
}
@@ -580,11 +563,7 @@ export function extensionForMediaMessage(message: WAMessageContent) {
	const getExtension = (mimetype: string) => mimetype.split(';')[0].split('/')[1]
	const type = Object.keys(message)[0] as MessageType
	let extension: string
	if(
		type === 'locationMessage' ||
		type === 'liveLocationMessage' ||
		type === 'productMessage'
	) {
	if (type === 'locationMessage' || type === 'liveLocationMessage' || type === 'productMessage') {
		extension = '.jpeg'
	} else {
		const messageContent = message[type] as WAGenericMediaMessage
@@ -596,18 +575,18 @@ export function extensionForMediaMessage(message: WAMessageContent) {

export const getWAUploadToServer = (
	{ customUploadHosts, fetchAgent, logger, options }: SocketConfig,
	refreshMediaConn: (force: boolean) => Promise<MediaConnInfo>,
	refreshMediaConn: (force: boolean) => Promise<MediaConnInfo>
): WAMediaUploadFunction => {
	return async(stream, { mediaType, fileEncSha256B64, timeoutMs }) => {
	return async (stream, { mediaType, fileEncSha256B64, timeoutMs }) => {
		// send a query JSON to obtain the url & auth token to upload our media
		let uploadInfo = await refreshMediaConn(false)

		let urls: { mediaUrl: string, directPath: string } | undefined
		const hosts = [ ...customUploadHosts, ...uploadInfo.hosts ]
		let urls: { mediaUrl: string; directPath: string } | undefined
		const hosts = [...customUploadHosts, ...uploadInfo.hosts]

		fileEncSha256B64 = encodeBase64EncodedStringForUpload(fileEncSha256B64)

		for(const { hostname } of hosts) {
		for (const { hostname } of hosts) {
			logger.debug(`uploading to "${hostname}"`)

			const auth = encodeURIComponent(uploadInfo.auth) // the auth token
@@ -615,27 +594,22 @@ export const getWAUploadToServer = (
			// eslint-disable-next-line @typescript-eslint/no-explicit-any
			let result: any
			try {

				const body = await axios.post(
					url,
					stream,
					{
						...options,
						headers: {
							...options.headers || { },
							'Content-Type': 'application/octet-stream',
							'Origin': DEFAULT_ORIGIN
						},
						httpsAgent: fetchAgent,
						timeout: timeoutMs,
						responseType: 'json',
						maxBodyLength: Infinity,
						maxContentLength: Infinity,
					}
				)
				const body = await axios.post(url, stream, {
					...options,
					headers: {
						...(options.headers || {}),
						'Content-Type': 'application/octet-stream',
						Origin: DEFAULT_ORIGIN
					},
					httpsAgent: fetchAgent,
					timeout: timeoutMs,
					responseType: 'json',
					maxBodyLength: Infinity,
					maxContentLength: Infinity
				})
				result = body.data

				if(result?.url || result?.directPath) {
				if (result?.url || result?.directPath) {
					urls = {
						mediaUrl: result.url,
						directPath: result.direct_path
@@ -645,21 +619,21 @@ export const getWAUploadToServer = (
					uploadInfo = await refreshMediaConn(true)
					throw new Error(`upload failed, reason: ${JSON.stringify(result)}`)
				}
			} catch(error) {
				if(axios.isAxiosError(error)) {
			} catch (error) {
				if (axios.isAxiosError(error)) {
					result = error.response?.data
				}

				const isLast = hostname === hosts[uploadInfo.hosts.length - 1]?.hostname
				logger.warn({ trace: error.stack, uploadResult: result }, `Error in uploading to ${hostname} ${isLast ? '' : ', retrying...'}`)
				logger.warn(
					{ trace: error.stack, uploadResult: result },
					`Error in uploading to ${hostname} ${isLast ? '' : ', retrying...'}`
				)
			}
		}

		if(!urls) {
			throw new Boom(
				'Media upload failed on all hosts',
				{ statusCode: 500 }
			)
		if (!urls) {
			throw new Boom('Media upload failed on all hosts', { statusCode: 500 })
		}

		return urls
@@ -673,11 +647,7 @@ const getMediaRetryKey = (mediaKey: Buffer | Uint8Array) => {
/**
 * Generate a binary node that will request the phone to re-upload the media & return the newly uploaded URL
 */
export const encryptMediaRetryRequest = async(
	key: proto.IMessageKey,
	mediaKey: Buffer | Uint8Array,
	meId: string
) => {
export const encryptMediaRetryRequest = async (key: proto.IMessageKey, mediaKey: Buffer | Uint8Array, meId: string) => {
	const recp: proto.IServerErrorReceipt = { stanzaId: key.id }
	const recpBuffer = proto.ServerErrorReceipt.encode(recp).finish()

@@ -698,17 +668,17 @@ export const encryptMediaRetryRequest = async(
			// keeping it here to maintain parity with WA Web
			{
				tag: 'encrypt',
				attrs: { },
				attrs: {},
				content: [
					{ tag: 'enc_p', attrs: { }, content: ciphertext },
					{ tag: 'enc_iv', attrs: { }, content: iv }
					{ tag: 'enc_p', attrs: {}, content: ciphertext },
					{ tag: 'enc_iv', attrs: {}, content: iv }
				]
			},
			{
				tag: 'rmr',
				attrs: {
					jid: key.remoteJid!,
					'from_me': (!!key.fromMe).toString(),
					from_me: (!!key.fromMe).toString(),
					// @ts-ignore
					participant: key.participant || undefined
				}
@@ -732,17 +702,17 @@ export const decodeMediaRetryNode = (node: BinaryNode) => {
	}

	const errorNode = getBinaryNodeChild(node, 'error')
	if(errorNode) {
	if (errorNode) {
		const errorCode = +errorNode.attrs.code
		event.error = new Boom(
			`Failed to re-upload media (${errorCode})`,
			{ data: errorNode.attrs, statusCode: getStatusCodeForMediaRetry(errorCode) }
		)
		event.error = new Boom(`Failed to re-upload media (${errorCode})`, {
			data: errorNode.attrs,
			statusCode: getStatusCodeForMediaRetry(errorCode)
		})
	} else {
		const encryptedInfoNode = getBinaryNodeChild(node, 'encrypt')
		const ciphertext = getBinaryNodeChildBuffer(encryptedInfoNode, 'enc_p')
		const iv = getBinaryNodeChildBuffer(encryptedInfoNode, 'enc_iv')
		if(ciphertext && iv) {
		if (ciphertext && iv) {
			event.media = { ciphertext, iv }
		} else {
			event.error = new Boom('Failed to re-upload media (missing ciphertext)', { statusCode: 404 })
@@ -752,8 +722,8 @@ export const decodeMediaRetryNode = (node: BinaryNode) => {
	return event
}

export const decryptMediaRetryData = async(
	{ ciphertext, iv }: { ciphertext: Uint8Array, iv: Uint8Array },
export const decryptMediaRetryData = async (
	{ ciphertext, iv }: { ciphertext: Uint8Array; iv: Uint8Array },
	mediaKey: Uint8Array,
	msgId: string
) => {
@@ -768,5 +738,5 @@ const MEDIA_RETRY_STATUS_MAP = {
	[proto.MediaRetryNotification.ResultType.SUCCESS]: 200,
	[proto.MediaRetryNotification.ResultType.DECRYPTION_ERROR]: 412,
	[proto.MediaRetryNotification.ResultType.NOT_FOUND]: 404,
	[proto.MediaRetryNotification.ResultType.GENERAL_ERROR]: 418,
} as const
	[proto.MediaRetryNotification.ResultType.GENERAL_ERROR]: 418
} as const

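// Editor's note: getMediaKeys above relies on an HKDF expansion of the 32-byte
// media key (112 bytes in WA's scheme, sliced into iv/cipherKey/macKey). A
// minimal RFC 5869 expand step for illustration -- not the library's own helper:
import { createHmac } from 'crypto'

const hkdfExpand = (prk: Buffer, info: string, length: number): Buffer => {
	const blocks: Buffer[] = []
	let prev = Buffer.alloc(0)
	for (let i = 1; Buffer.concat(blocks).length < length; i++) {
		prev = createHmac('sha256', prk)
			.update(Buffer.concat([prev, Buffer.from(info), Buffer.from([i])]))
			.digest()
		blocks.push(prev)
	}
	return Buffer.concat(blocks).slice(0, length)
}
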
File diff suppressed because it is too large
@@ -27,7 +27,7 @@ export const makeNoiseHandler = ({
	logger = logger.child({ class: 'ns' })

	const authenticate = (data: Uint8Array) => {
		if(!isFinished) {
		if (!isFinished) {
			hash = sha256(Buffer.concat([hash, data]))
		}
	}
@@ -47,7 +47,7 @@ export const makeNoiseHandler = ({
		const iv = generateIV(isFinished ? readCounter : writeCounter)
		const result = aesDecryptGCM(ciphertext, decKey, iv, hash)

		if(isFinished) {
		if (isFinished) {
			readCounter += 1
		} else {
			writeCounter += 1
@@ -57,12 +57,12 @@ export const makeNoiseHandler = ({
		return result
	}

	const localHKDF = async(data: Uint8Array) => {
	const localHKDF = async (data: Uint8Array) => {
		const key = await hkdf(Buffer.from(data), 64, { salt, info: '' })
		return [key.slice(0, 32), key.slice(32)]
	}

	const mixIntoKey = async(data: Uint8Array) => {
	const mixIntoKey = async (data: Uint8Array) => {
		const [write, read] = await localHKDF(data)
		salt = write
		encKey = read
@@ -71,7 +71,7 @@ export const makeNoiseHandler = ({
		writeCounter = 0
	}

	const finishInit = async() => {
	const finishInit = async () => {
		const [write, read] = await localHKDF(new Uint8Array(0))
		encKey = write
		decKey = read
@@ -102,7 +102,7 @@ export const makeNoiseHandler = ({
		authenticate,
		mixIntoKey,
		finishInit,
		processHandshake: async({ serverHello }: proto.HandshakeMessage, noiseKey: KeyPair) => {
		processHandshake: async ({ serverHello }: proto.HandshakeMessage, noiseKey: KeyPair) => {
			authenticate(serverHello!.ephemeral!)
			await mixIntoKey(Curve.sharedKey(privateKey, serverHello!.ephemeral!))

@@ -115,7 +115,7 @@ export const makeNoiseHandler = ({

			const { issuerSerial } = proto.CertChain.NoiseCertificate.Details.decode(certIntermediate!.details!)

			if(issuerSerial !== WA_CERT_DETAILS.SERIAL) {
			if (issuerSerial !== WA_CERT_DETAILS.SERIAL) {
				throw new Boom('certification match failed', { statusCode: 400 })
			}

@@ -125,13 +125,13 @@ export const makeNoiseHandler = ({
			return keyEnc
		},
		encodeFrame: (data: Buffer | Uint8Array) => {
			if(isFinished) {
			if (isFinished) {
				data = encrypt(data)
			}

			let header: Buffer

			if(routingInfo) {
			if (routingInfo) {
				header = Buffer.alloc(7)
				header.write('ED', 0, 'utf8')
				header.writeUint8(0, 2)
@@ -146,7 +146,7 @@ export const makeNoiseHandler = ({
			const introSize = sentIntro ? 0 : header.length
			const frame = Buffer.alloc(introSize + 3 + data.byteLength)

			if(!sentIntro) {
			if (!sentIntro) {
				frame.set(header)
				sentIntro = true
			}
@@ -157,26 +157,26 @@ export const makeNoiseHandler = ({

			return frame
		},
		decodeFrame: async(newData: Buffer | Uint8Array, onFrame: (buff: Uint8Array | BinaryNode) => void) => {
		decodeFrame: async (newData: Buffer | Uint8Array, onFrame: (buff: Uint8Array | BinaryNode) => void) => {
			// the binary protocol uses its own framing mechanism
			// on top of the WS frames
			// so we get this data and separate out the frames
			const getBytesSize = () => {
				if(inBytes.length >= 3) {
				if (inBytes.length >= 3) {
					return (inBytes.readUInt8() << 16) | inBytes.readUInt16BE(1)
				}
			}

			inBytes = Buffer.concat([ inBytes, newData ])
			inBytes = Buffer.concat([inBytes, newData])

			logger.trace(`recv ${newData.length} bytes, total recv ${inBytes.length} bytes`)

			let size = getBytesSize()
			while(size && inBytes.length >= size + 3) {
			while (size && inBytes.length >= size + 3) {
				let frame: Uint8Array | BinaryNode = inBytes.slice(3, size + 3)
				inBytes = inBytes.slice(size + 3)

				if(isFinished) {
				if (isFinished) {
					const result = decrypt(frame)
					frame = await decodeBinaryNode(result)
				}
@@ -188,4 +188,4 @@ export const makeNoiseHandler = ({
			}
		}
	}
}
}

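// Editor's note: the framing used by encodeFrame/decodeFrame above is a plain
// 3-byte big-endian length prefix. Mirror of getBytesSize, for illustration:
const writeFrameLength = (frame: Buffer, offset: number, size: number) => {
	frame.writeUInt8((size >> 16) & 0xff, offset) // high byte of the 24-bit length
	frame.writeUInt16BE(size & 0xffff, offset + 1) // low two bytes
}
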
@@ -1,6 +1,17 @@
import { AxiosRequestConfig } from 'axios'
import { proto } from '../../WAProto'
import { AuthenticationCreds, BaileysEventEmitter, CacheStore, Chat, GroupMetadata, ParticipantAction, RequestJoinAction, RequestJoinMethod, SignalKeyStoreWithTransaction, WAMessageStubType } from '../Types'
import {
	AuthenticationCreds,
	BaileysEventEmitter,
	CacheStore,
	Chat,
	GroupMetadata,
	ParticipantAction,
	RequestJoinAction,
	RequestJoinMethod,
	SignalKeyStoreWithTransaction,
	WAMessageStubType
} from '../Types'
import { getContentType, normalizeMessageContent } from '../Utils/messages'
import { areJidsSameUser, isJidBroadcast, isJidStatusBroadcast, jidNormalizedUser } from '../WABinary'
import { aesDecryptGCM, hmacSign } from './crypto'
@@ -25,9 +36,7 @@ const REAL_MSG_STUB_TYPES = new Set([
	WAMessageStubType.CALL_MISSED_VOICE
])

const REAL_MSG_REQ_ME_STUB_TYPES = new Set([
	WAMessageStubType.GROUP_PARTICIPANT_ADD
])
const REAL_MSG_REQ_ME_STUB_TYPES = new Set([WAMessageStubType.GROUP_PARTICIPANT_ADD])

/** Cleans a received message to further processing */
export const cleanMessage = (message: proto.IWebMessageInfo, meId: string) => {
@@ -36,25 +45,25 @@ export const cleanMessage = (message: proto.IWebMessageInfo, meId: string) => {
	message.key.participant = message.key.participant ? jidNormalizedUser(message.key.participant) : undefined
	const content = normalizeMessageContent(message.message)
	// if the message has a reaction, ensure fromMe & remoteJid are from our perspective
	if(content?.reactionMessage) {
	if (content?.reactionMessage) {
		normaliseKey(content.reactionMessage.key!)
	}

	if(content?.pollUpdateMessage) {
	if (content?.pollUpdateMessage) {
		normaliseKey(content.pollUpdateMessage.pollCreationMessageKey!)
	}

	function normaliseKey(msgKey: proto.IMessageKey) {
		// if the reaction is from another user
		// we've to correctly map the key to this user's perspective
		if(!message.key.fromMe) {
		if (!message.key.fromMe) {
			// if the sender believed the message being reacted to is not from them
			// we've to correct the key to be from them, or some other participant
			msgKey.fromMe = !msgKey.fromMe
				? areJidsSameUser(msgKey.participant || msgKey.remoteJid!, meId)
				// if the message being reacted to, was from them
				// fromMe automatically becomes false
				: false
				: // if the message being reacted to, was from them
					// fromMe automatically becomes false
					false
			// set the remoteJid to being the same as the chat the message came from
			msgKey.remoteJid = message.key.remoteJid
			// set participant of the message
@@ -67,33 +76,26 @@ export const isRealMessage = (message: proto.IWebMessageInfo, meId: string) => {
	const normalizedContent = normalizeMessageContent(message.message)
	const hasSomeContent = !!getContentType(normalizedContent)
	return (
		!!normalizedContent
		|| REAL_MSG_STUB_TYPES.has(message.messageStubType!)
		|| (
			REAL_MSG_REQ_ME_STUB_TYPES.has(message.messageStubType!)
			&& message.messageStubParameters?.some(p => areJidsSameUser(meId, p))
		)
		(!!normalizedContent ||
			REAL_MSG_STUB_TYPES.has(message.messageStubType!) ||
			(REAL_MSG_REQ_ME_STUB_TYPES.has(message.messageStubType!) &&
				message.messageStubParameters?.some(p => areJidsSameUser(meId, p)))) &&
		hasSomeContent &&
		!normalizedContent?.protocolMessage &&
		!normalizedContent?.reactionMessage &&
		!normalizedContent?.pollUpdateMessage
	)
		&& hasSomeContent
		&& !normalizedContent?.protocolMessage
		&& !normalizedContent?.reactionMessage
		&& !normalizedContent?.pollUpdateMessage
}

export const shouldIncrementChatUnread = (message: proto.IWebMessageInfo) => (
export const shouldIncrementChatUnread = (message: proto.IWebMessageInfo) =>
	!message.key.fromMe && !message.messageStubType
)

/**
 * Get the ID of the chat from the given key.
 * Typically -- that'll be the remoteJid, but for broadcasts, it'll be the participant
 */
export const getChatId = ({ remoteJid, participant, fromMe }: proto.IMessageKey) => {
	if(
		isJidBroadcast(remoteJid!)
		&& !isJidStatusBroadcast(remoteJid!)
		&& !fromMe
	) {
	if (isJidBroadcast(remoteJid!) && !isJidStatusBroadcast(remoteJid!) && !fromMe) {
		return participant!
	}

@@ -119,22 +121,15 @@ type PollContext = {
 */
export function decryptPollVote(
	{ encPayload, encIv }: proto.Message.IPollEncValue,
	{
		pollCreatorJid,
		pollMsgId,
		pollEncKey,
		voterJid,
	}: PollContext
	{ pollCreatorJid, pollMsgId, pollEncKey, voterJid }: PollContext
) {
	const sign = Buffer.concat(
		[
			toBinary(pollMsgId),
			toBinary(pollCreatorJid),
			toBinary(voterJid),
			toBinary('Poll Vote'),
			new Uint8Array([1])
		]
	)
	const sign = Buffer.concat([
		toBinary(pollMsgId),
		toBinary(pollCreatorJid),
		toBinary(voterJid),
		toBinary('Poll Vote'),
		new Uint8Array([1])
	])

	const key0 = hmacSign(pollEncKey, new Uint8Array(32), 'sha256')
	const decKey = hmacSign(sign, key0, 'sha256')
@@ -148,17 +143,9 @@ export function decryptPollVote(
	}
}

const processMessage = async(
const processMessage = async (
	message: proto.IWebMessageInfo,
	{
		shouldProcessHistoryMsg,
		placeholderResendCache,
		ev,
		creds,
		keyStore,
		logger,
		options
	}: ProcessMessageContext
	{ shouldProcessHistoryMsg, placeholderResendCache, ev, creds, keyStore, logger, options }: ProcessMessageContext
) => {
	const meId = creds.me!.id
	const { accountSettings } = creds
@@ -166,11 +153,11 @@ const processMessage = async(
	const chat: Partial<Chat> = { id: jidNormalizedUser(getChatId(message.key)) }
	const isRealMsg = isRealMessage(message, meId)

	if(isRealMsg) {
	if (isRealMsg) {
		chat.messages = [{ message }]
		chat.conversationTimestamp = toNumber(message.messageTimestamp)
		// only increment unread count if not CIPHERTEXT and from another person
		if(shouldIncrementChatUnread(message)) {
		if (shouldIncrementChatUnread(message)) {
			chat.unreadCount = (chat.unreadCount || 0) + 1
		}
	}
@@ -179,63 +166,56 @@ const processMessage = async(

	// unarchive chat if it's a real message, or someone reacted to our message
	// and we've the unarchive chats setting on
	if(
		(isRealMsg || content?.reactionMessage?.key?.fromMe)
		&& accountSettings?.unarchiveChats
	) {
	if ((isRealMsg || content?.reactionMessage?.key?.fromMe) && accountSettings?.unarchiveChats) {
		chat.archived = false
		chat.readOnly = false
	}

	const protocolMsg = content?.protocolMessage
	if(protocolMsg) {
	if (protocolMsg) {
		switch (protocolMsg.type) {
			case proto.Message.ProtocolMessage.Type.HISTORY_SYNC_NOTIFICATION:
				const histNotification = protocolMsg.historySyncNotification!
				const process = shouldProcessHistoryMsg
				const isLatest = !creds.processedHistoryMessages?.length
			case proto.Message.ProtocolMessage.Type.HISTORY_SYNC_NOTIFICATION:
				const histNotification = protocolMsg.historySyncNotification!
				const process = shouldProcessHistoryMsg
				const isLatest = !creds.processedHistoryMessages?.length

				logger?.info({
					histNotification,
					process,
					id: message.key.id,
					isLatest,
				}, 'got history notification')
				logger?.info(
					{
						histNotification,
						process,
						id: message.key.id,
						isLatest
					},
					'got history notification'
				)

				if(process) {
					if(histNotification.syncType !== proto.HistorySync.HistorySyncType.ON_DEMAND) {
						ev.emit('creds.update', {
							processedHistoryMessages: [
								...(creds.processedHistoryMessages || []),
								{ key: message.key, messageTimestamp: message.messageTimestamp }
							]
				if (process) {
					if (histNotification.syncType !== proto.HistorySync.HistorySyncType.ON_DEMAND) {
						ev.emit('creds.update', {
							processedHistoryMessages: [
								...(creds.processedHistoryMessages || []),
								{ key: message.key, messageTimestamp: message.messageTimestamp }
							]
						})
					}

					const data = await downloadAndProcessHistorySyncNotification(histNotification, options)

					ev.emit('messaging-history.set', {
						...data,
						isLatest: histNotification.syncType !== proto.HistorySync.HistorySyncType.ON_DEMAND ? isLatest : undefined,
						peerDataRequestSessionId: histNotification.peerDataRequestSessionId
					})
				}

					const data = await downloadAndProcessHistorySyncNotification(
						histNotification,
						options
					)

					ev.emit('messaging-history.set', {
						...data,
						isLatest:
							histNotification.syncType !== proto.HistorySync.HistorySyncType.ON_DEMAND
								? isLatest
								: undefined,
						peerDataRequestSessionId: histNotification.peerDataRequestSessionId
					})
				}

				break
			case proto.Message.ProtocolMessage.Type.APP_STATE_SYNC_KEY_SHARE:
				const keys = protocolMsg.appStateSyncKeyShare!.keys
				if(keys?.length) {
					let newAppStateSyncKeyId = ''
					await keyStore.transaction(
						async() => {
				break
			case proto.Message.ProtocolMessage.Type.APP_STATE_SYNC_KEY_SHARE:
				const keys = protocolMsg.appStateSyncKeyShare!.keys
				if (keys?.length) {
					let newAppStateSyncKeyId = ''
					await keyStore.transaction(async () => {
						const newKeys: string[] = []
						for(const { keyData, keyId } of keys) {
						for (const { keyData, keyId } of keys) {
							const strKeyId = Buffer.from(keyId!.keyId!).toString('base64')
							newKeys.push(strKeyId)

@@ -244,65 +224,59 @@ const processMessage = async(
|
||||
newAppStateSyncKeyId = strKeyId
|
||||
}
|
||||
|
||||
logger?.info(
|
||||
{ newAppStateSyncKeyId, newKeys },
|
||||
'injecting new app state sync keys'
|
||||
)
|
||||
}
|
||||
)
|
||||
logger?.info({ newAppStateSyncKeyId, newKeys }, 'injecting new app state sync keys')
|
||||
})
|
||||
|
||||
ev.emit('creds.update', { myAppStateKeyId: newAppStateSyncKeyId })
|
||||
} else {
|
||||
logger?.info({ protocolMsg }, 'recv app state sync with 0 keys')
|
||||
}
|
||||
|
||||
break
|
||||
case proto.Message.ProtocolMessage.Type.REVOKE:
|
||||
ev.emit('messages.update', [
|
||||
{
|
||||
key: {
|
||||
...message.key,
|
||||
id: protocolMsg.key!.id
|
||||
},
|
||||
update: { message: null, messageStubType: WAMessageStubType.REVOKE, key: message.key }
|
||||
ev.emit('creds.update', { myAppStateKeyId: newAppStateSyncKeyId })
|
||||
} else {
|
||||
logger?.info({ protocolMsg }, 'recv app state sync with 0 keys')
|
||||
}
|
||||
])
|
||||
break
|
||||
case proto.Message.ProtocolMessage.Type.EPHEMERAL_SETTING:
|
||||
Object.assign(chat, {
|
||||
ephemeralSettingTimestamp: toNumber(message.messageTimestamp),
|
||||
ephemeralExpiration: protocolMsg.ephemeralExpiration || null
|
||||
})
|
||||
break
|
||||
case proto.Message.ProtocolMessage.Type.PEER_DATA_OPERATION_REQUEST_RESPONSE_MESSAGE:
|
||||
const response = protocolMsg.peerDataOperationRequestResponseMessage!
|
||||
if(response) {
|
||||
placeholderResendCache?.del(response.stanzaId!)
|
||||
// TODO: IMPLEMENT HISTORY SYNC ETC (sticker uploads etc.).
|
||||
const { peerDataOperationResult } = response
|
||||
for(const result of peerDataOperationResult!) {
|
||||
const { placeholderMessageResendResponse: retryResponse } = result
|
||||
//eslint-disable-next-line max-depth
|
||||
if(retryResponse) {
|
||||
const webMessageInfo = proto.WebMessageInfo.decode(retryResponse.webMessageInfoBytes!)
|
||||
// wait till another upsert event is available, don't want it to be part of the PDO response message
|
||||
setTimeout(() => {
|
||||
ev.emit('messages.upsert', {
|
||||
messages: [webMessageInfo],
|
||||
type: 'notify',
|
||||
requestId: response.stanzaId!
|
||||
})
|
||||
}, 500)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case proto.Message.ProtocolMessage.Type.MESSAGE_EDIT:
ev.emit(
'messages.update',
[
break
case proto.Message.ProtocolMessage.Type.REVOKE:
ev.emit('messages.update', [
{
// flip the sender / fromMe properties because they're in the perspective of the sender
key: {
...message.key,
id: protocolMsg.key!.id
},
update: { message: null, messageStubType: WAMessageStubType.REVOKE, key: message.key }
}
])
break
case proto.Message.ProtocolMessage.Type.EPHEMERAL_SETTING:
Object.assign(chat, {
ephemeralSettingTimestamp: toNumber(message.messageTimestamp),
ephemeralExpiration: protocolMsg.ephemeralExpiration || null
})
break
case proto.Message.ProtocolMessage.Type.PEER_DATA_OPERATION_REQUEST_RESPONSE_MESSAGE:
const response = protocolMsg.peerDataOperationRequestResponseMessage!
if (response) {
placeholderResendCache?.del(response.stanzaId!)
// TODO: IMPLEMENT HISTORY SYNC ETC (sticker uploads etc.).
const { peerDataOperationResult } = response
for (const result of peerDataOperationResult!) {
const { placeholderMessageResendResponse: retryResponse } = result
//eslint-disable-next-line max-depth
if (retryResponse) {
const webMessageInfo = proto.WebMessageInfo.decode(retryResponse.webMessageInfoBytes!)
// wait till another upsert event is available, don't want it to be part of the PDO response message
setTimeout(() => {
ev.emit('messages.upsert', {
messages: [webMessageInfo],
type: 'notify',
requestId: response.stanzaId!
})
}, 500)
}
}
}

case proto.Message.ProtocolMessage.Type.MESSAGE_EDIT:
ev.emit('messages.update', [
{
// flip the sender / fromMe properties because they're in the perspective of the sender
key: { ...message.key, id: protocolMsg.key?.id },
update: {
message: {
@@ -315,26 +289,26 @@ const processMessage = async(
: message.messageTimestamp
}
}
]
)
break
])
break
}
} else if(content?.reactionMessage) {
} else if (content?.reactionMessage) {
const reaction: proto.IReaction = {
...content.reactionMessage,
key: message.key,
key: message.key
}
ev.emit('messages.reaction', [{
reaction,
key: content.reactionMessage?.key!,
}])
} else if(message.messageStubType) {
ev.emit('messages.reaction', [
{
reaction,
key: content.reactionMessage?.key!
}
])
} else if (message.messageStubType) {
const jid = message.key?.remoteJid!
//let actor = whatsappID (message.participant)
let participants: string[]
const emitParticipantsUpdate = (action: ParticipantAction) => (
const emitParticipantsUpdate = (action: ParticipantAction) =>
ev.emit('group-participants.update', { id: jid, author: message.participant!, participants, action })
)
const emitGroupUpdate = (update: Partial<GroupMetadata>) => {
ev.emit('groups.update', [{ id: jid, ...update, author: message.participant ?? undefined }])
}
@@ -346,76 +320,75 @@ const processMessage = async(
const participantsIncludesMe = () => participants.find(jid => areJidsSameUser(meId, jid))

switch (message.messageStubType) {
case WAMessageStubType.GROUP_PARTICIPANT_CHANGE_NUMBER:
participants = message.messageStubParameters || []
emitParticipantsUpdate('modify')
break
case WAMessageStubType.GROUP_PARTICIPANT_LEAVE:
case WAMessageStubType.GROUP_PARTICIPANT_REMOVE:
participants = message.messageStubParameters || []
emitParticipantsUpdate('remove')
// mark the chat read only if you left the group
if(participantsIncludesMe()) {
chat.readOnly = true
}
case WAMessageStubType.GROUP_PARTICIPANT_CHANGE_NUMBER:
participants = message.messageStubParameters || []
emitParticipantsUpdate('modify')
break
case WAMessageStubType.GROUP_PARTICIPANT_LEAVE:
case WAMessageStubType.GROUP_PARTICIPANT_REMOVE:
participants = message.messageStubParameters || []
emitParticipantsUpdate('remove')
// mark the chat read only if you left the group
if (participantsIncludesMe()) {
chat.readOnly = true
}

break
case WAMessageStubType.GROUP_PARTICIPANT_ADD:
case WAMessageStubType.GROUP_PARTICIPANT_INVITE:
case WAMessageStubType.GROUP_PARTICIPANT_ADD_REQUEST_JOIN:
participants = message.messageStubParameters || []
if(participantsIncludesMe()) {
chat.readOnly = false
}
break
case WAMessageStubType.GROUP_PARTICIPANT_ADD:
case WAMessageStubType.GROUP_PARTICIPANT_INVITE:
case WAMessageStubType.GROUP_PARTICIPANT_ADD_REQUEST_JOIN:
participants = message.messageStubParameters || []
if (participantsIncludesMe()) {
chat.readOnly = false
}

emitParticipantsUpdate('add')
break
case WAMessageStubType.GROUP_PARTICIPANT_DEMOTE:
participants = message.messageStubParameters || []
emitParticipantsUpdate('demote')
break
case WAMessageStubType.GROUP_PARTICIPANT_PROMOTE:
participants = message.messageStubParameters || []
emitParticipantsUpdate('promote')
break
case WAMessageStubType.GROUP_CHANGE_ANNOUNCE:
const announceValue = message.messageStubParameters?.[0]
emitGroupUpdate({ announce: announceValue === 'true' || announceValue === 'on' })
break
case WAMessageStubType.GROUP_CHANGE_RESTRICT:
const restrictValue = message.messageStubParameters?.[0]
emitGroupUpdate({ restrict: restrictValue === 'true' || restrictValue === 'on' })
break
case WAMessageStubType.GROUP_CHANGE_SUBJECT:
const name = message.messageStubParameters?.[0]
chat.name = name
emitGroupUpdate({ subject: name })
break
case WAMessageStubType.GROUP_CHANGE_DESCRIPTION:
const description = message.messageStubParameters?.[0]
chat.description = description
emitGroupUpdate({ desc: description })
break
case WAMessageStubType.GROUP_CHANGE_INVITE_LINK:
const code = message.messageStubParameters?.[0]
emitGroupUpdate({ inviteCode: code })
break
case WAMessageStubType.GROUP_MEMBER_ADD_MODE:
const memberAddValue = message.messageStubParameters?.[0]
emitGroupUpdate({ memberAddMode: memberAddValue === 'all_member_add' })
break
case WAMessageStubType.GROUP_MEMBERSHIP_JOIN_APPROVAL_MODE:
const approvalMode = message.messageStubParameters?.[0]
emitGroupUpdate({ joinApprovalMode: approvalMode === 'on' })
break
case WAMessageStubType.GROUP_MEMBERSHIP_JOIN_APPROVAL_REQUEST_NON_ADMIN_ADD:
const participant = message.messageStubParameters?.[0] as string
const action = message.messageStubParameters?.[1] as RequestJoinAction
const method = message.messageStubParameters?.[2] as RequestJoinMethod
emitGroupRequestJoin(participant, action, method)
break
emitParticipantsUpdate('add')
break
case WAMessageStubType.GROUP_PARTICIPANT_DEMOTE:
participants = message.messageStubParameters || []
emitParticipantsUpdate('demote')
break
case WAMessageStubType.GROUP_PARTICIPANT_PROMOTE:
participants = message.messageStubParameters || []
emitParticipantsUpdate('promote')
break
case WAMessageStubType.GROUP_CHANGE_ANNOUNCE:
const announceValue = message.messageStubParameters?.[0]
emitGroupUpdate({ announce: announceValue === 'true' || announceValue === 'on' })
break
case WAMessageStubType.GROUP_CHANGE_RESTRICT:
const restrictValue = message.messageStubParameters?.[0]
emitGroupUpdate({ restrict: restrictValue === 'true' || restrictValue === 'on' })
break
case WAMessageStubType.GROUP_CHANGE_SUBJECT:
const name = message.messageStubParameters?.[0]
chat.name = name
emitGroupUpdate({ subject: name })
break
case WAMessageStubType.GROUP_CHANGE_DESCRIPTION:
const description = message.messageStubParameters?.[0]
chat.description = description
emitGroupUpdate({ desc: description })
break
case WAMessageStubType.GROUP_CHANGE_INVITE_LINK:
const code = message.messageStubParameters?.[0]
emitGroupUpdate({ inviteCode: code })
break
case WAMessageStubType.GROUP_MEMBER_ADD_MODE:
const memberAddValue = message.messageStubParameters?.[0]
emitGroupUpdate({ memberAddMode: memberAddValue === 'all_member_add' })
break
case WAMessageStubType.GROUP_MEMBERSHIP_JOIN_APPROVAL_MODE:
const approvalMode = message.messageStubParameters?.[0]
emitGroupUpdate({ joinApprovalMode: approvalMode === 'on' })
break
case WAMessageStubType.GROUP_MEMBERSHIP_JOIN_APPROVAL_REQUEST_NON_ADMIN_ADD:
const participant = message.messageStubParameters?.[0] as string
const action = message.messageStubParameters?.[1] as RequestJoinAction
const method = message.messageStubParameters?.[2] as RequestJoinMethod
emitGroupRequestJoin(participant, action, method)
break
}

} /* else if(content?.pollUpdateMessage) {
const creationMsgKey = content.pollUpdateMessage.pollCreationMessageKey!
// we need to fetch the poll creation message to get the poll enc key
@@ -466,7 +439,7 @@ const processMessage = async(
}
} */

if(Object.keys(chat).length > 1) {
if (Object.keys(chat).length > 1) {
ev.emit('chats.update', [chat])
}
}

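Since processMessage communicates purely through the event bus, a short sketch of the listener side; `sock` is assumed to come from makeWASocket, the handler bodies are placeholders, and the event names are exactly the ones emitted above:

sock.ev.on('chats.update', updates => {
	for (const update of updates) {
		console.log('chat changed', update.id, update.unreadCount)
	}
})
sock.ev.on('group-participants.update', ({ id, action, participants }) => {
	console.log(`group ${id}: ${action}`, participants)
})
sock.ev.on('messages.reaction', reactions => {
	console.log('got reactions', reactions)
})
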
@@ -1,25 +1,39 @@
import { chunk } from 'lodash'
import { KEY_BUNDLE_TYPE } from '../Defaults'
import { SignalRepository } from '../Types'
import { AuthenticationCreds, AuthenticationState, KeyPair, SignalIdentity, SignalKeyStore, SignedKeyPair } from '../Types/Auth'
import { assertNodeErrorFree, BinaryNode, getBinaryNodeChild, getBinaryNodeChildBuffer, getBinaryNodeChildren, getBinaryNodeChildUInt, jidDecode, JidWithDevice, S_WHATSAPP_NET } from '../WABinary'
import {
AuthenticationCreds,
AuthenticationState,
KeyPair,
SignalIdentity,
SignalKeyStore,
SignedKeyPair
} from '../Types/Auth'
import {
assertNodeErrorFree,
BinaryNode,
getBinaryNodeChild,
getBinaryNodeChildBuffer,
getBinaryNodeChildren,
getBinaryNodeChildUInt,
jidDecode,
JidWithDevice,
S_WHATSAPP_NET
} from '../WABinary'
import { DeviceListData, ParsedDeviceInfo, USyncQueryResultList } from '../WAUSync'
import { Curve, generateSignalPubKey } from './crypto'
import { encodeBigEndian } from './generics'

export const createSignalIdentity = (
wid: string,
accountSignatureKey: Uint8Array
): SignalIdentity => {
export const createSignalIdentity = (wid: string, accountSignatureKey: Uint8Array): SignalIdentity => {
return {
identifier: { name: wid, deviceId: 0 },
identifierKey: generateSignalPubKey(accountSignatureKey)
}
}

export const getPreKeys = async({ get }: SignalKeyStore, min: number, limit: number) => {
export const getPreKeys = async ({ get }: SignalKeyStore, min: number, limit: number) => {
const idList: string[] = []
for(let id = min; id < limit;id++) {
for (let id = min; id < limit; id++) {
idList.push(id.toString())
}

@@ -30,9 +44,9 @@ export const generateOrGetPreKeys = (creds: AuthenticationCreds, range: number)
const avaliable = creds.nextPreKeyId - creds.firstUnuploadedPreKeyId
const remaining = range - avaliable
const lastPreKeyId = creds.nextPreKeyId + remaining - 1
const newPreKeys: { [id: number]: KeyPair } = { }
if(remaining > 0) {
for(let i = creds.nextPreKeyId;i <= lastPreKeyId;i++) {
const newPreKeys: { [id: number]: KeyPair } = {}
if (remaining > 0) {
for (let i = creds.nextPreKeyId; i <= lastPreKeyId; i++) {
newPreKeys[i] = Curve.generateKeyPair()
}
}
@@ -40,46 +54,40 @@ export const generateOrGetPreKeys = (creds: AuthenticationCreds, range: number)
return {
newPreKeys,
lastPreKeyId,
preKeysRange: [creds.firstUnuploadedPreKeyId, range] as const,
preKeysRange: [creds.firstUnuploadedPreKeyId, range] as const
}
}

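A quick worked trace of generateOrGetPreKeys with hypothetical numbers:

// creds.nextPreKeyId = 10, creds.firstUnuploadedPreKeyId = 5, range = 30
// avaliable    = 10 - 5      = 5    -> keys generated earlier but never uploaded
// remaining    = 30 - 5      = 25   -> fresh keys still needed
// lastPreKeyId = 10 + 25 - 1 = 34   -> ids 10..34 get new Curve key pairs
// preKeysRange = [5, 30]            -> the upload window starts at the first unuploaded id
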
export const xmppSignedPreKey = (key: SignedKeyPair): BinaryNode => (
{
tag: 'skey',
attrs: { },
content: [
{ tag: 'id', attrs: { }, content: encodeBigEndian(key.keyId, 3) },
{ tag: 'value', attrs: { }, content: key.keyPair.public },
{ tag: 'signature', attrs: { }, content: key.signature }
]
}
)
export const xmppSignedPreKey = (key: SignedKeyPair): BinaryNode => ({
tag: 'skey',
attrs: {},
content: [
{ tag: 'id', attrs: {}, content: encodeBigEndian(key.keyId, 3) },
{ tag: 'value', attrs: {}, content: key.keyPair.public },
{ tag: 'signature', attrs: {}, content: key.signature }
]
})

export const xmppPreKey = (pair: KeyPair, id: number): BinaryNode => (
{
tag: 'key',
attrs: { },
content: [
{ tag: 'id', attrs: { }, content: encodeBigEndian(id, 3) },
{ tag: 'value', attrs: { }, content: pair.public }
]
}
)
export const xmppPreKey = (pair: KeyPair, id: number): BinaryNode => ({
tag: 'key',
attrs: {},
content: [
{ tag: 'id', attrs: {}, content: encodeBigEndian(id, 3) },
{ tag: 'value', attrs: {}, content: pair.public }
]
})

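For reference, a sketch of what one serialized pre-key node looks like; the id 42 is arbitrary and Curve comes from './crypto' as imported above:

const node = xmppPreKey(Curve.generateKeyPair(), 42)
// node is roughly: { tag: 'key', attrs: {}, content: [
//   { tag: 'id', attrs: {}, content: <3-byte big-endian 42> },
//   { tag: 'value', attrs: {}, content: <32-byte Curve25519 public key> }
// ] }
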
export const parseAndInjectE2ESessions = async(
node: BinaryNode,
repository: SignalRepository
) => {
const extractKey = (key: BinaryNode) => (
key ? ({
keyId: getBinaryNodeChildUInt(key, 'id', 3)!,
publicKey: generateSignalPubKey(getBinaryNodeChildBuffer(key, 'value')!),
signature: getBinaryNodeChildBuffer(key, 'signature')!,
}) : undefined
)
export const parseAndInjectE2ESessions = async (node: BinaryNode, repository: SignalRepository) => {
const extractKey = (key: BinaryNode) =>
key
? {
keyId: getBinaryNodeChildUInt(key, 'id', 3)!,
publicKey: generateSignalPubKey(getBinaryNodeChildBuffer(key, 'value')!),
signature: getBinaryNodeChildBuffer(key, 'signature')!
}
: undefined
const nodes = getBinaryNodeChildren(getBinaryNodeChild(node, 'list'), 'user')
for(const node of nodes) {
for (const node of nodes) {
assertNodeErrorFree(node)
}

@@ -90,27 +98,25 @@ export const parseAndInjectE2ESessions = async(
// It's rare case when you need to E2E sessions for so many users, but it's possible
const chunkSize = 100
const chunks = chunk(nodes, chunkSize)
for(const nodesChunk of chunks) {
for (const nodesChunk of chunks) {
await Promise.all(
nodesChunk.map(
async node => {
const signedKey = getBinaryNodeChild(node, 'skey')!
const key = getBinaryNodeChild(node, 'key')!
const identity = getBinaryNodeChildBuffer(node, 'identity')!
const jid = node.attrs.jid
const registrationId = getBinaryNodeChildUInt(node, 'registration', 4)
nodesChunk.map(async node => {
const signedKey = getBinaryNodeChild(node, 'skey')!
const key = getBinaryNodeChild(node, 'key')!
const identity = getBinaryNodeChildBuffer(node, 'identity')!
const jid = node.attrs.jid
const registrationId = getBinaryNodeChildUInt(node, 'registration', 4)

await repository.injectE2ESession({
jid,
session: {
registrationId: registrationId!,
identityKey: generateSignalPubKey(identity),
signedPreKey: extractKey(signedKey)!,
preKey: extractKey(key)!
}
})
}
)
await repository.injectE2ESession({
jid,
session: {
registrationId: registrationId!,
identityKey: generateSignalPubKey(identity),
signedPreKey: extractKey(signedKey)!,
preKey: extractKey(key)!
}
})
})
)
}
}
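The batching above leans on lodash's chunk to cap concurrency at chunkSize injections per Promise.all; as a quick illustration:

// chunk splits an array into fixed-size slices (the last slice may be shorter):
// chunk(['a', 'b', 'c', 'd', 'e'], 2) -> [['a', 'b'], ['c', 'd'], ['e']]
// so at most 100 injectE2ESession calls are in flight at any moment
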
@@ -120,14 +126,13 @@ export const extractDeviceJids = (result: USyncQueryResultList[], myJid: string,

const extracted: JidWithDevice[] = []


for(const userResult of result) {
const { devices, id } = userResult as { devices: ParsedDeviceInfo, id: string }
for (const userResult of result) {
const { devices, id } = userResult as { devices: ParsedDeviceInfo; id: string }
const { user } = jidDecode(id)!
const deviceList = devices?.deviceList as DeviceListData[]
if(Array.isArray(deviceList)) {
for(const { id: device, keyIndex } of deviceList) {
if(
if (Array.isArray(deviceList)) {
for (const { id: device, keyIndex } of deviceList) {
if (
(!excludeZeroDevices || device !== 0) && // if zero devices are not-excluded, or device is non zero
(myUser !== user || myDevice !== device) && // either different user or if me user, not this device
(device === 0 || !!keyIndex) // ensure that "key-index" is specified for "non-zero" devices, produces a bad req otherwise
@@ -145,7 +150,7 @@ export const extractDeviceJids = (result: USyncQueryResultList[], myJid: string,
* get the next N keys for upload or processing
* @param count number of pre-keys to get or generate
*/
export const getNextPreKeys = async({ creds, keys }: AuthenticationState, count: number) => {
export const getNextPreKeys = async ({ creds, keys }: AuthenticationState, count: number) => {
const { newPreKeys, lastPreKeyId, preKeysRange } = generateOrGetPreKeys(creds, count)

const update: Partial<AuthenticationCreds> = {
@@ -160,7 +165,7 @@ export const getNextPreKeys = async({ creds, keys }: AuthenticationState, count:
return { update, preKeys }
}

export const getNextPreKeysNode = async(state: AuthenticationState, count: number) => {
export const getNextPreKeysNode = async (state: AuthenticationState, count: number) => {
const { creds } = state
const { update, preKeys } = await getNextPreKeys(state, count)

@@ -169,13 +174,13 @@ export const getNextPreKeysNode = async(state: AuthenticationState, count: numbe
attrs: {
xmlns: 'encrypt',
type: 'set',
to: S_WHATSAPP_NET,
to: S_WHATSAPP_NET
},
content: [
{ tag: 'registration', attrs: { }, content: encodeBigEndian(creds.registrationId) },
{ tag: 'type', attrs: { }, content: KEY_BUNDLE_TYPE },
{ tag: 'identity', attrs: { }, content: creds.signedIdentityKey.public },
{ tag: 'list', attrs: { }, content: Object.keys(preKeys).map(k => xmppPreKey(preKeys[+k], +k)) },
{ tag: 'registration', attrs: {}, content: encodeBigEndian(creds.registrationId) },
{ tag: 'type', attrs: {}, content: KEY_BUNDLE_TYPE },
{ tag: 'identity', attrs: {}, content: creds.signedIdentityKey.public },
{ tag: 'list', attrs: {}, content: Object.keys(preKeys).map(k => xmppPreKey(preKeys[+k], +k)) },
xmppSignedPreKey(creds.signedPreKey)
]
}

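A hedged sketch of how this node would typically be used to replenish the server-side pre-key pool; `query` (an IQ sender) and the returned `{ update, node }` shape are assumptions inferred from the surrounding code, not shown in this diff:

const { update, node } = await getNextPreKeysNode(state, 30)
await query(node) // send the 'encrypt' set IQ: registration id, identity, pre-key list, signed pre-key
ev.emit('creds.update', update) // persist the advanced pre-key counters
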
@@ -15,7 +15,7 @@ const fileLocks = new Map<string, Mutex>()
// Get or create a mutex for a specific file path
const getFileLock = (path: string): Mutex => {
let mutex = fileLocks.get(path)
if(!mutex) {
if (!mutex) {
mutex = new Mutex()
fileLocks.set(path, mutex)
}
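The Map-of-mutexes pattern serializes access per file path while distinct files proceed in parallel. A minimal sketch of the acquire/release discipline, mirroring writeData below (`doWork` is a placeholder):

const mutex = getFileLock('/auth/creds.json')
const release = await mutex.acquire()
try {
	await doWork() // only one reader/writer per path at a time
} finally {
	release() // always release, even on error
}
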
@@ -30,13 +30,15 @@ const getFileLock = (path: string): Mutex => {
* Again, I wouldn't endorse this for any production level use other than perhaps a bot.
* Would recommend writing an auth state for use with a proper SQL or No-SQL DB
* */
export const useMultiFileAuthState = async(folder: string): Promise<{ state: AuthenticationState, saveCreds: () => Promise<void> }> => {
export const useMultiFileAuthState = async (
folder: string
): Promise<{ state: AuthenticationState; saveCreds: () => Promise<void> }> => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const writeData = async(data: any, file: string) => {
const writeData = async (data: any, file: string) => {
const filePath = join(folder, fixFileName(file)!)
const mutex = getFileLock(filePath)

return mutex.acquire().then(async(release) => {
return mutex.acquire().then(async release => {
try {
await writeFile(filePath, JSON.stringify(data, BufferJSON.replacer))
} finally {
@@ -45,12 +47,12 @@ export const useMultiFileAuthState = async(folder: string): Promise<{ state: Aut
})
}

const readData = async(file: string) => {
const readData = async (file: string) => {
try {
const filePath = join(folder, fixFileName(file)!)
const mutex = getFileLock(filePath)

return await mutex.acquire().then(async(release) => {
return await mutex.acquire().then(async release => {
try {
const data = await readFile(filePath, { encoding: 'utf-8' })
return JSON.parse(data, BufferJSON.reviver)
@@ -58,32 +60,33 @@ export const useMultiFileAuthState = async(folder: string): Promise<{ state: Aut
release()
}
})
} catch(error) {
} catch (error) {
return null
}
}

const removeData = async(file: string) => {
const removeData = async (file: string) => {
try {
const filePath = join(folder, fixFileName(file)!)
const mutex = getFileLock(filePath)

return mutex.acquire().then(async(release) => {
return mutex.acquire().then(async release => {
try {
await unlink(filePath)
} catch{
} catch {
} finally {
release()
}
})
} catch{
}
} catch {}
}

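writeData and readData round-trip through BufferJSON so Buffer fields inside the creds survive JSON serialization; a quick illustration (the key name is made up):

const obj = { noiseKey: Buffer.from([1, 2, 3]) }
const json = JSON.stringify(obj, BufferJSON.replacer) // Buffers become tagged base64 objects
const back = JSON.parse(json, BufferJSON.reviver) // ...and come back as real Buffers
console.log(Buffer.isBuffer(back.noiseKey)) // true
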
const folderInfo = await stat(folder).catch(() => { })
if(folderInfo) {
if(!folderInfo.isDirectory()) {
throw new Error(`found something that is not a directory at ${folder}, either delete it or specify a different location`)
const folderInfo = await stat(folder).catch(() => {})
if (folderInfo) {
if (!folderInfo.isDirectory()) {
throw new Error(
`found something that is not a directory at ${folder}, either delete it or specify a different location`
)
}
} else {
await mkdir(folder, { recursive: true })
@@ -91,33 +94,31 @@ export const useMultiFileAuthState = async(folder: string): Promise<{ state: Aut

const fixFileName = (file?: string) => file?.replace(/\//g, '__')?.replace(/:/g, '-')

const creds: AuthenticationCreds = await readData('creds.json') || initAuthCreds()
const creds: AuthenticationCreds = (await readData('creds.json')) || initAuthCreds()

return {
state: {
creds,
keys: {
get: async(type, ids) => {
const data: { [_: string]: SignalDataTypeMap[typeof type] } = { }
get: async (type, ids) => {
const data: { [_: string]: SignalDataTypeMap[typeof type] } = {}
await Promise.all(
ids.map(
async id => {
let value = await readData(`${type}-${id}.json`)
if(type === 'app-state-sync-key' && value) {
value = proto.Message.AppStateSyncKeyData.fromObject(value)
}

data[id] = value
ids.map(async id => {
let value = await readData(`${type}-${id}.json`)
if (type === 'app-state-sync-key' && value) {
value = proto.Message.AppStateSyncKeyData.fromObject(value)
}
)

data[id] = value
})
)

return data
},
set: async(data) => {
set: async data => {
const tasks: Promise<void>[] = []
for(const category in data) {
for(const id in data[category]) {
for (const category in data) {
for (const id in data[category]) {
const value = data[category][id]
const file = `${category}-${id}.json`
tasks.push(value ? writeData(value, file) : removeData(file))
@@ -128,8 +129,8 @@ export const useMultiFileAuthState = async(folder: string): Promise<{ state: Aut
}
}
},
saveCreds: async() => {
saveCreds: async () => {
return writeData(creds, 'creds.json')
}
}
}
}

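For completeness, the canonical wiring of this helper ('auth_info_baileys' is just an example folder name):

const { state, saveCreds } = await useMultiFileAuthState('auth_info_baileys')
const sock = makeWASocket({ auth: state })
// persist credentials whenever they change
sock.ev.on('creds.update', saveCreds)
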
@@ -13,7 +13,7 @@ const getUserAgent = (config: SocketConfig): proto.ClientPayload.IUserAgent => {
appVersion: {
primary: config.version[0],
secondary: config.version[1],
tertiary: config.version[2],
tertiary: config.version[2]
},
platform: proto.ClientPayload.UserAgent.Platform.WEB,
releaseChannel: proto.ClientPayload.UserAgent.ReleaseChannel.RELEASE,
@@ -23,30 +23,29 @@ const getUserAgent = (config: SocketConfig): proto.ClientPayload.IUserAgent => {
localeLanguageIso6391: 'en',
mnc: '000',
mcc: '000',
localeCountryIso31661Alpha2: config.countryCode,
localeCountryIso31661Alpha2: config.countryCode
}
}

const PLATFORM_MAP = {
'Mac OS': proto.ClientPayload.WebInfo.WebSubPlatform.DARWIN,
'Windows': proto.ClientPayload.WebInfo.WebSubPlatform.WIN32
Windows: proto.ClientPayload.WebInfo.WebSubPlatform.WIN32
}

const getWebInfo = (config: SocketConfig): proto.ClientPayload.IWebInfo => {
let webSubPlatform = proto.ClientPayload.WebInfo.WebSubPlatform.WEB_BROWSER
if(config.syncFullHistory && PLATFORM_MAP[config.browser[0]]) {
if (config.syncFullHistory && PLATFORM_MAP[config.browser[0]]) {
webSubPlatform = PLATFORM_MAP[config.browser[0]]
}

return { webSubPlatform }
}


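To trace getWebInfo: only a 'Mac OS' or 'Windows' browser name combined with syncFullHistory moves the sub-platform away from WEB_BROWSER. A sketch with a partial, assumed config:

// Sketch: a partial SocketConfig; only the fields getWebInfo reads are filled in.
const webInfo = getWebInfo({
	syncFullHistory: true,
	browser: ['Mac OS', 'Desktop', '1.0.0']
} as SocketConfig)
// webInfo.webSubPlatform === proto.ClientPayload.WebInfo.WebSubPlatform.DARWIN
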
const getClientPayload = (config: SocketConfig) => {
const payload: proto.IClientPayload = {
connectType: proto.ClientPayload.ConnectType.WIFI_UNKNOWN,
connectReason: proto.ClientPayload.ConnectReason.USER_ACTIVATED,
userAgent: getUserAgent(config),
userAgent: getUserAgent(config)
}

payload.webInfo = getWebInfo(config)
@@ -54,7 +53,6 @@ const getClientPayload = (config: SocketConfig) => {
return payload
}


export const generateLoginNode = (userJid: string, config: SocketConfig): proto.IClientPayload => {
const { user, device } = jidDecode(userJid)!
const payload: proto.IClientPayload = {
@@ -62,7 +60,7 @@ export const generateLoginNode = (userJid: string, config: SocketConfig): proto.
passive: false,
pull: true,
username: +user,
device: device,
device: device
}
return proto.ClientPayload.fromObject(payload)
}
@@ -85,7 +83,7 @@ export const generateRegistrationNode = (
const companion: proto.IDeviceProps = {
os: config.browser[0],
platformType: getPlatformType(config.browser[1]),
requireFullSync: config.syncFullHistory,
requireFullSync: config.syncFullHistory
}

const companionProto = proto.DeviceProps.encode(companion).finish()
@@ -102,8 +100,8 @@ export const generateRegistrationNode = (
eIdent: signedIdentityKey.public,
eSkeyId: encodeBigEndian(signedPreKey.keyId, 3),
eSkeyVal: signedPreKey.keyPair.public,
eSkeySig: signedPreKey.signature,
},
eSkeySig: signedPreKey.signature
}
}

return proto.ClientPayload.fromObject(registerPayload)
@@ -111,7 +109,11 @@ export const generateRegistrationNode = (

export const configureSuccessfulPairing = (
stanza: BinaryNode,
{ advSecretKey, signedIdentityKey, signalIdentities }: Pick<AuthenticationCreds, 'advSecretKey' | 'signedIdentityKey' | 'signalIdentities'>
{
advSecretKey,
signedIdentityKey,
signalIdentities
}: Pick<AuthenticationCreds, 'advSecretKey' | 'signedIdentityKey' | 'signalIdentities'>
) => {
const msgId = stanza.attrs.id

@@ -122,7 +124,7 @@ export const configureSuccessfulPairing = (
const deviceNode = getBinaryNodeChild(pairSuccessNode, 'device')
const businessNode = getBinaryNodeChild(pairSuccessNode, 'biz')

if(!deviceIdentityNode || !deviceNode) {
if (!deviceIdentityNode || !deviceNode) {
throw new Boom('Missing device-identity or device in pair success node', { data: stanza })
}

@@ -132,20 +134,20 @@ export const configureSuccessfulPairing = (
const { details, hmac } = proto.ADVSignedDeviceIdentityHMAC.decode(deviceIdentityNode.content as Buffer)
// check HMAC matches
const advSign = hmacSign(details!, Buffer.from(advSecretKey, 'base64'))
if(Buffer.compare(hmac!, advSign) !== 0) {
if (Buffer.compare(hmac!, advSign) !== 0) {
throw new Boom('Invalid account signature')
}

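The pairing validation is a three-step chain; in comment form (byte prefixes exactly as in the code above and below):

// 1. HMAC-SHA256(details, advSecretKey) must equal the stanza's hmac -> the payload really came via our pairing secret
// 2. accountSignature must verify over [6, 0] ++ details ++ ourIdentityKey -> the account vouches for our identity key
// 3. we sign [6, 1] ++ details ++ ourIdentityKey ++ accountSignatureKey -> our device vouches back (deviceSignature)
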
// sign the details with our identity key
|
||||
const deviceMsg = Buffer.concat([ Buffer.from([6, 1]), deviceDetails!, signedIdentityKey.public, accountSignatureKey! ])
|
||||
const deviceMsg = Buffer.concat([Buffer.from([6, 1]), deviceDetails!, signedIdentityKey.public, accountSignatureKey!])
|
||||
account.deviceSignature = Curve.sign(signedIdentityKey.private, deviceMsg)
|
||||
|
||||
const identity = createSignalIdentity(jid, accountSignatureKey!)
|
||||
@@ -158,12 +160,12 @@ export const configureSuccessfulPairing = (
|
||||
attrs: {
|
||||
to: S_WHATSAPP_NET,
|
||||
type: 'result',
|
||||
id: msgId,
|
||||
id: msgId
|
||||
},
|
||||
content: [
|
||||
{
|
||||
tag: 'pair-device-sign',
|
||||
attrs: { },
|
||||
attrs: {},
|
||||
content: [
|
||||
{
|
||||
tag: 'device-identity',
|
||||
@@ -178,10 +180,7 @@ export const configureSuccessfulPairing = (
|
||||
const authUpdate: Partial<AuthenticationCreds> = {
|
||||
account,
|
||||
me: { id: jid, name: bizName },
|
||||
signalIdentities: [
|
||||
...(signalIdentities || []),
|
||||
identity
|
||||
],
|
||||
signalIdentities: [...(signalIdentities || []), identity],
|
||||
platform: platformNode?.attrs.name
|
||||
}
|
||||
|
||||
@@ -191,18 +190,13 @@ export const configureSuccessfulPairing = (
|
||||
}
|
||||
}
|
||||
|
||||
export const encodeSignedDeviceIdentity = (
|
||||
account: proto.IADVSignedDeviceIdentity,
|
||||
includeSignatureKey: boolean
|
||||
) => {
|
||||
export const encodeSignedDeviceIdentity = (account: proto.IADVSignedDeviceIdentity, includeSignatureKey: boolean) => {
|
||||
account = { ...account }
|
||||
// set to null if we are not to include the signature key
|
||||
// or if we are including the signature key but it is empty
|
||||
if(!includeSignatureKey || !account.accountSignatureKey?.length) {
|
||||
if (!includeSignatureKey || !account.accountSignatureKey?.length) {
|
||||
account.accountSignatureKey = null
|
||||
}
|
||||
|
||||
return proto.ADVSignedDeviceIdentity
|
||||
.encode(account)
|
||||
.finish()
|
||||
return proto.ADVSignedDeviceIdentity.encode(account).finish()
|
||||
}
|
||||
|
||||