Scrape Linkedin Followers
Intro
I wrote a script that will scrape all of your followers' names and LinkedIn URLs and export them as a CSV!
How to Use It
Go to: https://www.linkedin.com/mynetwork/invite-connect/connections/
Open your browser's console, then copy the code below and paste it into the console. Here's a video to show you how to do that.
Hit enter and let it do its magic! 🧙♂️
It will take a few minutes to run. When it's done, it will download a CSV file with all the data.
The script is pretty slow. Near the end it may look like it's finished, but it isn't — give it a moment and wait for the CSV to download.
How to Hire Me
If you need anything else scraped, I am accepting new clients, send me an email to get a custom scraper/bot created 👉 adrian@thewebscrapingguy.com
/**
 * Converts an array of flat objects to CSV text and triggers a browser
 * download of the result.
 *
 * @param {Object[]} jsonData - rows to export; the header row is taken from
 *   the first object's keys, and every row is emitted in that same column
 *   order so columns stay aligned even if later objects have extra or
 *   missing keys.
 * @param {string} fileName - suggested name for the downloaded file.
 */
function createCSV(jsonData, fileName) {
  // Guard: the original indexed jsonData[0] unconditionally and threw on an
  // empty array.
  if (!Array.isArray(jsonData) || jsonData.length === 0) {
    console.warn('createCSV: no data to export')
    return
  }
  // Quote a single cell per RFC 4180: wrap in double quotes (doubling any
  // embedded quotes) when the value contains a comma, quote, or newline.
  // The original only quoted on commas, which produced broken rows for
  // values containing quotes or line breaks, and emitted the literal text
  // "undefined" for missing values.
  const escapeCell = (value) => {
    if (value === null || value === undefined) return ''
    const str = String(value)
    return /[",\n\r]/.test(str) ? `"${str.replace(/"/g, '""')}"` : str
  }
  const headers = Object.keys(jsonData[0])
  const csvData = [headers.map(escapeCell).join(',')]
  for (const item of jsonData) {
    // Iterate the header keys (not the item's own keys) so every row has
    // exactly one cell per column.
    csvData.push(headers.map((key) => escapeCell(item[key])).join(','))
  }
  // Package the CSV text as a Blob and download it via a temporary <a> link.
  const csvBlob = new Blob([csvData.join('\n')], {
    type: 'text/csv;charset=utf-8',
  })
  const csvUrl = URL.createObjectURL(csvBlob)
  const link = document.createElement('a')
  link.href = csvUrl
  link.target = '_blank'
  link.download = fileName
  document.body.appendChild(link)
  // Programmatic click starts the download.
  link.click()
  // Clean up the DOM node and release the Blob URL.
  document.body.removeChild(link)
  URL.revokeObjectURL(csvUrl)
}
/**
 * Collects the connection cards currently rendered on the page.
 *
 * @returns {Array<{name: string, linkedin_url: string}>} one record per
 *   fully rendered connection card.
 */
function getProfiles() {
  const allProfiles = []
  const profileDivs = document.querySelectorAll('.mn-connection-card__details')
  for (const profileDiv of profileDivs) {
    // Guard against partially rendered cards: the original code did
    // querySelectorAll(...)[0] and threw a TypeError (aborting the whole
    // scrape) whenever either element was missing. Skip such cards instead.
    const nameEl = profileDiv.querySelector('.mn-connection-card__name')
    const linkEl = profileDiv.querySelector('.mn-connection-card__link')
    if (!nameEl || !linkEl) continue
    allProfiles.push({
      name: nameEl.textContent.trim(),
      linkedin_url: linkEl.href,
    })
  }
  return allProfiles
}
// Auto-scrolls the connections page, harvesting profile cards as they
// lazy-load, then exports the deduplicated results via createCSV.
// Terminates when a full pass of scrolling no longer grows the page height
// and no "load more" button is present.
//
// NOTE(review): the setInterval callback is async, so a tick that awaits the
// 1s sleeps below can overlap the next tick — appears harmless here because
// each tick only appends/dedupes, but worth confirming.
async function scrapeLinkedinFollowers() {
let allProfiles = []
// Resolves only when scrolling is exhausted; `reject` is never called.
await new Promise((resolve, reject) => {
// Distance scrolled so far in the current pass; reset each time we reach
// the bottom of the currently loaded content.
var totalHeight = 0
// Pixels scrolled per 400ms tick.
var distance = 600
var timer = setInterval(async () => {
var scrollHeightBefore =
document.documentElement.scrollHeight || document.body.scrollHeight
window.scrollBy(0, distance)
// Harvest whatever cards are rendered right now; getProfiles reads the
// live DOM, so repeated calls pick up newly lazy-loaded cards.
const profiles = getProfiles()
console.log(`Found ${profiles.length} profiles!`)
allProfiles.push(...profiles)
// Remove duplicates (the same cards are re-read every tick), keyed by
// linkedin_url — keeps the first occurrence of each.
allProfiles = allProfiles.filter(
(v, i, a) =>
a.findIndex((t) => t.linkedin_url === v.linkedin_url) === i,
)
totalHeight += distance
// We have scrolled past the height measured at the start of this pass.
if (totalHeight >= scrollHeightBefore) {
totalHeight = 0
// Re-measure: if the page grew during this tick, more content loaded.
var scrollHeightAfter =
document.documentElement.scrollHeight || document.body.scrollHeight
if (scrollHeightAfter > scrollHeightBefore) {
// More content loaded, keep scrolling.
return
} else {
// No growth: either truly done, or LinkedIn is showing a manual
// "load more" button instead of infinite scroll.
console.log('done scrolling!')
// Give late content a moment to render before checking for the button.
await new Promise((resolve) => setTimeout(resolve, 1000))
// Click the "load more" button if present and let the interval
// continue scrolling into the newly loaded content.
const loadMoreButton = document.querySelector(
'.scaffold-finite-scroll__load-button',
)
if (loadMoreButton) {
loadMoreButton.click()
return
}
// No button either — wait once more, then stop for good.
await new Promise((resolve) => setTimeout(resolve, 1000))
clearInterval(timer)
resolve()
}
}
}, 400)
})
console.log('DONE!')
console.log(`Found ${allProfiles.length} profiles!`)
console.log(allProfiles)
console.log(
'If you need anything else scraped, hit me up! adrian@thewebscrapingguy.com',
)
// Download everything collected as a CSV file.
createCSV(allProfiles, 'linkedin-followers.csv')
}
// Entry point: top-level await works when pasted into the browser console.
await scrapeLinkedinFollowers()