input_file = "naukri/_gulf_location.csv"

prahul11 2023-10-11 18:32:57 +05:30
parent 8885cfb9bf
commit cbbb1ed261
1 changed file with 2 additions and 2 deletions

View File

@@ -6,7 +6,7 @@ import csv
 import math
 output_filename_csv = "gulf_data/output_all_gulf.csv"
-input("remove lien 72 10000 limit wala")
+# input("remove lien 72 10000 limit wala")
 headers = {
     'authority': 'www.naukrigulf.com',
     'accept': 'application/json',
@@ -33,7 +33,7 @@ headers = {
 error_pages = []
 keys_to_extract = ['designation', 'jobId', 'company','Companyname', 'Companyid', 'Companyurl','latestPostedDate','isEasyApply','jobSource','location','jdURL','vacancies']
 fields_to_write = ['designation', 'jobId', 'Companyname', 'Companyid', 'Companyurl','latestPostedDate','isEasyApply','jobSource','location','jdURL','vacancies','city']
-input_file = "_gulf_location.csv"
+input_file = "naukri/_gulf_location.csv"
 jobs_per_pages = 50
 base_url = "https://www.naukrigulf.com/spapi/jobapi/search?Experience=&Keywords=&KeywordsAr=&Limit=50&Location={}&LocationAr=&Offset={}&SortPreference=&breadcrumb=1&locationId=&nationality=&nationalityLabel=&pageNo={}&srchId='"